Merge staging-next into staging

Frederik Rietdijk 2020-01-11 09:33:08 +01:00
commit 263bc67bda
388 changed files with 14446 additions and 8556 deletions

.github/CODEOWNERS vendored
View File

@ -82,6 +82,7 @@
# Rust
/pkgs/development/compilers/rust @Mic92 @LnL7
/pkgs/build-support/rust @andir
# Darwin-related
/pkgs/stdenv/darwin @NixOS/darwin-maintainers

View File

@ -17,7 +17,3 @@
- [ ] Determined the impact on package closure size (by running `nix path-info -S` before and after)
- [ ] Ensured that relevant documentation is up to date
- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).
###### Notify maintainers
cc @

View File

@ -13,7 +13,7 @@
</listitem>
<listitem>
<para>
Fork the repository on GitHub.
Fork <link xlink:href="https://github.com/nixos/nixpkgs/">the Nixpkgs repository</link> on GitHub.
</para>
</listitem>
<listitem>
@ -22,15 +22,10 @@
<itemizedlist>
<listitem>
<para>
You can make branch from a commit of your local <command>nixos-version</command>. That will help you to avoid additional local compilations. Because you will receive packages from binary cache.
<itemizedlist>
<listitem>
<para>
For example: <command>nixos-version</command> returns <command>15.05.git.0998212 (Dingo)</command>. So you can do:
</para>
</listitem>
</itemizedlist>
You can make a branch from a commit of your local <command>nixos-version</command>. That will help you avoid additional local compilations, because you will receive packages from the binary cache. For example
<screen>
<prompt>$ </prompt>nixos-version --hash
0998212
<prompt>$ </prompt>git checkout 0998212
<prompt>$ </prompt>git checkout -b 'fix/pkg-name-update'
</screen>
@ -47,13 +42,11 @@
<listitem>
<para>
Make commits of logical units.
<itemizedlist>
<listitem>
<para>
If you removed pkgs, made some major NixOS changes etc., write about them in <command>nixos/doc/manual/release-notes/rl-unstable.xml</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
If you removed pkgs or made some major NixOS changes, write about it in the release notes for the next stable release. For example <command>nixos/doc/manual/release-notes/rl-2003.xml</command>.
</para>
</listitem>
<listitem>
@ -178,7 +171,7 @@ Additional information.
</listitem>
<listitem>
<para>
Rebase you branch against current <command>master</command>.
<link xlink:href="https://git-scm.com/book/en/v2/Git-Branching-Rebasing">Rebase</link> your branch against current <command>master</command>.
</para>
</listitem>
</itemizedlist>
@ -194,36 +187,12 @@ Additional information.
</listitem>
<listitem>
<para>
Create pull request:
<itemizedlist>
<listitem>
<para>
Write the title in format <command>(pkg-name | nixos/&lt;module>): improvement</command>.
<itemizedlist>
<listitem>
<para>
If you update the pkg, write versions <command>from -> to</command>.
</para>
</listitem>
</itemizedlist>
</para>
</listitem>
<listitem>
<para>
Write in comment if you have tested your patch. Do not rely much on <command>TravisCI</command>.
</para>
</listitem>
<listitem>
<para>
If you make an improvement, write about your motivation.
</para>
</listitem>
<listitem>
<para>
Notify maintainers of the package. For example add to the message: <command>cc @jagajaga @domenkozar</command>.
</para>
</listitem>
</itemizedlist>
Create the pull request.
</para>
</listitem>
<listitem>
<para>
Follow <link xlink:href="https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md#submitting-changes">the contribution guidelines</link>.
</para>
</listitem>
</itemizedlist>

View File

@ -84,37 +84,36 @@ nix-env -qaP -A nixos.haskellPackages
nix-env -iA nixos.haskellPackages.cabal-install
```
Our current default compiler is GHC 7.10.x and the `haskellPackages` set
contains packages built with that particular version. Nixpkgs contains the
latest major release of every GHC since 6.10.4, however, and there is a whole
family of package sets available that defines Hackage packages built with each
of those compilers, too:
Our current default compiler is GHC 8.6.x and the `haskellPackages` set
contains packages built with that particular version. Nixpkgs contains the last
three major releases of GHC and there is a whole family of package sets
available that defines Hackage packages built with each of those compilers,
too:
```shell
nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc6123
nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc763
nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc844
nix-env -f "<nixpkgs>" -qaP -A haskell.packages.ghc882
```
The name `haskellPackages` is really just a synonym for
`haskell.packages.ghc7102`, because we prefer that package set internally and
`haskell.packages.ghc865`, because we prefer that package set internally and
recommend it to our users as their default choice, but ultimately you are free
to compile your Haskell packages with any GHC version you please. The following
command displays the complete list of available compilers:
```
$ nix-env -f "<nixpkgs>" -qaP -A haskell.compiler
haskell.compiler.ghc822 ghc-8.2.2
haskell.compiler.integer-simple.ghc822 ghc-8.2.2
haskell.compiler.ghc8101 ghc-8.10.0.20191210
haskell.compiler.integer-simple.ghc8101 ghc-8.10.0.20191210
haskell.compiler.ghcHEAD ghc-8.10.20191119
haskell.compiler.integer-simple.ghcHEAD ghc-8.10.20191119
haskell.compiler.ghc822Binary ghc-8.2.2-binary
haskell.compiler.ghc844 ghc-8.4.4
haskell.compiler.ghc863Binary ghc-8.6.3-binary
haskell.compiler.ghc864 ghc-8.6.4
haskell.compiler.integer-simple.ghc864 ghc-8.6.4
haskell.compiler.ghc865 ghc-8.6.5
haskell.compiler.integer-simple.ghc865 ghc-8.6.5
haskell.compiler.ghc881 ghc-8.8.1
haskell.compiler.integer-simple.ghc881 ghc-8.8.1
haskell.compiler.ghcHEAD ghc-8.9.20190601
haskell.compiler.integer-simple.ghcHEAD ghc-8.9.20190601
haskell.compiler.ghcjs84 ghcjs-8.4.0.1
haskell.compiler.ghc882 ghc-8.8.1.20191211
haskell.compiler.integer-simple.ghc882 ghc-8.8.1.20191211
haskell.compiler.ghcjs ghcjs-8.6.0.1
```
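
For instance, to actually use one of the non-default compilers from the list above, you can pull the corresponding package set into a `shell.nix`. This is a minimal sketch, assuming `ghc882` exists in your nixpkgs checkout; the Hackage packages `aeson` and `lens` are only illustrative:
```nix
# shell.nix -- sketch of an environment built with a non-default GHC
{ pkgs ? import <nixpkgs> {} }:

pkgs.mkShell {
  buildInputs = [
    # ghcWithPackages assembles a GHC that can see the listed Hackage packages
    (pkgs.haskell.packages.ghc882.ghcWithPackages (ps: [ ps.aeson ps.lens ]))
  ];
}
```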

View File

@ -499,7 +499,7 @@ and in this case the `python35` interpreter is automatically used.
Versions 2.7, 3.5, 3.6, 3.7 and 3.8 of the CPython interpreter are available as
respectively `python27`, `python35`, `python36`, `python37` and `python38`. The aliases
`python2` and `python3` correspond to respectively `python27` and
`python38`. The default interpreter, `python`, maps to `python2`. The PyPy
`python37`. The default interpreter, `python`, maps to `python2`. The PyPy
interpreters compatible with Python 2.7 and 3 are available as `pypy27` and
`pypy3`, with aliases `pypy2` mapping to `pypy27` and `pypy` mapping to
`pypy2`. The Nix expressions for the interpreters can be
@ -821,6 +821,9 @@ should be used with `ignoreCollisions = true`.
The following are setup hooks specifically for Python packages. Most of these are
used in `buildPythonPackage`.
- `eggUnpackHook` to move an egg to the correct folder so it can be installed with the `eggInstallHook`
- `eggBuildHook` to skip building for eggs.
- `eggInstallHook` to install eggs.
- `flitBuildHook` to build a wheel using `flit`.
- `pipBuildHook` to build a wheel using `pip` and PEP 517. Note that a build system (e.g. `setuptools` or `flit`) should still be added as `nativeBuildInputs` (see the sketch after this list).
- `pipInstallHook` to install wheels.
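
The sketch below shows how these hooks typically come together in a `buildPythonPackage` call; it is illustrative only, with a placeholder name, version and hash, and assumes `format = "pyproject"` to select the `pip`/PEP 517 build path:
```nix
# Hypothetical flit-based package; pname, version and sha256 are placeholders.
{ lib, python3Packages }:

python3Packages.buildPythonPackage rec {
  pname = "example-pkg";
  version = "1.0.0";
  # "pyproject" builds a wheel via pip and PEP 517 (pipBuildHook),
  # which pipInstallHook then installs.
  format = "pyproject";

  src = python3Packages.fetchPypi {
    inherit pname version;
    sha256 = lib.fakeSha256;
  };

  # The build backend still has to be provided explicitly.
  nativeBuildInputs = [ python3Packages.flit ];
}
```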

View File

@ -101,7 +101,7 @@ let
cleanSource sourceByRegex sourceFilesBySuffices
commitIdFromGitRepo cleanSourceWith pathHasContext
canCleanSource;
inherit (modules) evalModules closeModules unifyModuleSyntax
inherit (modules) evalModules unifyModuleSyntax
applyIfFunction mergeModules
mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions
pushDownProperties dischargeProperties filterOverrides
@ -110,7 +110,7 @@ let
mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
mkRenamedOptionModule mkMergedOptionModule mkChangedOptionModule
mkAliasOptionModule doRename filterModules;
mkAliasOptionModule doRename;
inherit (options) isOption mkEnableOption mkSinkUndeclaredOptions
mergeDefaultOption mergeOneOption mergeEqualOption getValues
getFiles optionAttrSetToDocList optionAttrSetToDocList'

View File

@ -41,7 +41,13 @@ rec {
options = {
_module.args = mkOption {
type = types.attrsOf types.unspecified;
# Because things like `mkIf` are entirely useless for
# `_module.args` (there is no way modules can check which
# arguments were passed), we use `lazyAttrsOf`, which drops
# support for them but in turn is lazy in its values. This means e.g.
# a `_module.args.pkgs = import (fetchTarball { ... }) {}` won't
# start a download when `pkgs` isn't evaluated.
type = types.lazyAttrsOf types.unspecified;
internal = true;
description = "Arguments passed to each module.";
};
@ -59,9 +65,12 @@ rec {
};
};
closed = closeModules (modules ++ [ internalModule ]) ({ inherit config options lib; } // specialArgs);
collected = collectModules
(specialArgs.modulesPath or "")
(modules ++ [ internalModule ])
({ inherit config options lib; } // specialArgs);
options = mergeModules prefix (reverseList (filterModules (specialArgs.modulesPath or "") closed));
options = mergeModules prefix (reverseList collected);
# Traverse options and extract the option values into the final
# config set. At the same time, check whether all option
@ -87,31 +96,76 @@ rec {
result = { inherit options config; };
in result;
# collectModules :: (modulesPath: String) -> (modules: [ Module ]) -> (args: Attrs) -> [ Module ]
#
# Collects all modules recursively through `import` statements, filtering out
# all modules in disabledModules.
collectModules = let
# Filter disabled modules. Modules can be disabled allowing
# their implementation to be replaced.
filterModules = modulesPath: modules:
let
moduleKey = m: if isString m then toString modulesPath + "/" + m else toString m;
disabledKeys = map moduleKey (concatMap (m: m.disabledModules) modules);
in
filter (m: !(elem m.key disabledKeys)) modules;
# Like unifyModuleSyntax, but also imports paths and calls functions if necessary
loadModule = args: fallbackFile: fallbackKey: m:
if isFunction m || isAttrs m then
unifyModuleSyntax fallbackFile fallbackKey (applyIfFunction fallbackKey m args)
else unifyModuleSyntax (toString m) (toString m) (applyIfFunction (toString m) (import m) args);
/* Close a set of modules under the imports relation. */
closeModules = modules: args:
let
toClosureList = file: parentKey: imap1 (n: x:
if isAttrs x || isFunction x then
let key = "${parentKey}:anon-${toString n}"; in
unifyModuleSyntax file key (applyIfFunction key x args)
else
let file = toString x; key = toString x; in
unifyModuleSyntax file key (applyIfFunction key (import x) args));
in
builtins.genericClosure {
startSet = toClosureList unknownModule "" modules;
operator = m: toClosureList m._file m.key m.imports;
};
/*
Collects all modules recursively into the form
{
disabled = [ <list of disabled modules> ];
# All modules of the main module list
modules = [
{
key = <key1>;
module = <module for key1>;
# All modules imported by the module for key1
modules = [
{
key = <key1-1>;
module = <module for key1-1>;
# All modules imported by the module for key1-1
modules = [ ... ];
}
...
];
}
...
];
}
*/
collectStructuredModules =
let
collectResults = modules: {
disabled = concatLists (catAttrs "disabled" modules);
inherit modules;
};
in parentFile: parentKey: initialModules: args: collectResults (imap1 (n: x:
let
module = loadModule args parentFile "${parentKey}:anon-${toString n}" x;
collectedImports = collectStructuredModules module._file module.key module.imports args;
in {
key = module.key;
module = module;
modules = collectedImports.modules;
disabled = module.disabledModules ++ collectedImports.disabled;
}) initialModules);
# filterModules :: String -> { disabled, modules } -> [ Module ]
#
# Filters a structure as emitted by collectStructuredModules by removing all disabled
# modules recursively. It returns the final list of unique-by-key modules
filterModules = modulesPath: { disabled, modules }:
let
moduleKey = m: if isString m then toString modulesPath + "/" + m else toString m;
disabledKeys = map moduleKey disabled;
keyFilter = filter (attrs: ! elem attrs.key disabledKeys);
in map (attrs: attrs.module) (builtins.genericClosure {
startSet = keyFilter modules;
operator = attrs: keyFilter attrs.modules;
});
in modulesPath: initialModules: args:
filterModules modulesPath (collectStructuredModules unknownModule "" initialModules args);
/* Massage a module into canonical form, that is, a set consisting
of options, config and imports attributes. */
@ -123,7 +177,7 @@ rec {
if m ? config || m ? options then
let badAttrs = removeAttrs m ["_file" "key" "disabledModules" "imports" "options" "config" "meta"]; in
if badAttrs != {} then
throw "Module `${key}' has an unsupported attribute `${head (attrNames badAttrs)}'. This is caused by assignments to the top-level attributes `config' or `options'."
throw "Module `${key}' has an unsupported attribute `${head (attrNames badAttrs)}'. This is caused by introducing a top-level `config' or `options' attribute. Add configuration attributes immediately on the top level instead, or move all of them (namely: ${toString (attrNames badAttrs)}) into the explicit `config' attribute."
else
{ _file = m._file or file;
key = toString m.key or key;
@ -317,16 +371,9 @@ rec {
else
mergeDefinitions loc opt.type defs';
# The value with a check that it is defined
valueDefined = if res.isDefined then res.mergedValue else
# (nixos-option detects this specific error message and gives it special
# handling. If changed here, please change it there too.)
throw "The option `${showOption loc}' is used but not defined.";
# Apply the 'apply' function to the merged value. This allows options to
# yield a value computed from the definitions
value = if opt ? apply then opt.apply valueDefined else valueDefined;
value = if opt ? apply then opt.apply res.mergedValue else res.mergedValue;
in opt //
{ value = builtins.addErrorContext "while evaluating the option `${showOption loc}':" value;
@ -360,11 +407,17 @@ rec {
};
defsFinal = defsFinal'.values;
# Type-check the remaining definitions, and merge them.
mergedValue = foldl' (res: def:
if type.check def.value then res
else throw "The option value `${showOption loc}' in `${def.file}' is not of type `${type.description}'.")
(type.merge loc defsFinal) defsFinal;
# Type-check the remaining definitions, and merge them. Or throw if no definitions.
mergedValue =
if isDefined then
foldl' (res: def:
if type.check def.value then res
else throw "The option value `${showOption loc}' in `${def.file}' is not of type `${type.description}'."
) (type.merge loc defsFinal) defsFinal
else
# (nixos-option detects this specific error message and gives it special
# handling. If changed here, please change it there too.)
throw "The option `${showOption loc}' is used but not defined.";
isDefined = defsFinal != [];

View File

@ -12,7 +12,7 @@ evalConfig() {
local attr=$1
shift;
local script="import ./default.nix { modules = [ $@ ];}"
nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only --show-trace
nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only --show-trace --read-write-mode
}
reportFailure() {
@ -177,6 +177,24 @@ checkConfigOutput "true" config.submodule.outer ./declare-submoduleWith-modules.
# Temporarily disabled until https://github.com/NixOS/nixpkgs/pull/76861
#checkConfigOutput "true" config.submodule.enable ./declare-submoduleWith-path.nix
# Check that disabledModules works recursively and correctly
checkConfigOutput "true" config.enable ./disable-recursive/main.nix
checkConfigOutput "true" config.enable ./disable-recursive/{main.nix,disable-foo.nix}
checkConfigOutput "true" config.enable ./disable-recursive/{main.nix,disable-bar.nix}
checkConfigError 'The option .* defined in .* does not exist' config.enable ./disable-recursive/{main.nix,disable-foo.nix,disable-bar.nix}
# Check that imports can depend on derivations
checkConfigOutput "true" config.enable ./import-from-store.nix
# Check attrsOf and lazyAttrsOf. Only lazyAttrsOf should be lazy, and only
# attrsOf should work with conditional definitions.
# In addition, lazyAttrsOf should honor an option's emptyValue.
checkConfigError "is not lazy" config.isLazy ./declare-attrsOf.nix ./attrsOf-lazy-check.nix
checkConfigOutput "true" config.isLazy ./declare-lazyAttrsOf.nix ./attrsOf-lazy-check.nix
checkConfigOutput "true" config.conditionalWorks ./declare-attrsOf.nix ./attrsOf-conditional-check.nix
checkConfigOutput "false" config.conditionalWorks ./declare-lazyAttrsOf.nix ./attrsOf-conditional-check.nix
checkConfigOutput "empty" config.value.foo ./declare-lazyAttrsOf.nix ./attrsOf-conditional-check.nix
cat <<EOF
====== module tests ======
$pass Pass

View File

@ -0,0 +1,7 @@
{ lib, config, ... }: {
options.conditionalWorks = lib.mkOption {
default = ! config.value ? foo;
};
config.value.foo = lib.mkIf false "should not be defined";
}

View File

@ -0,0 +1,7 @@
{ lib, config, ... }: {
options.isLazy = lib.mkOption {
default = ! config.value ? foo;
};
config.value.bar = throw "is not lazy";
}

View File

@ -0,0 +1,6 @@
{ lib, ... }: {
options.value = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
default = {};
};
}

View File

@ -0,0 +1,6 @@
{ lib, ... }: {
options.value = lib.mkOption {
type = lib.types.lazyAttrsOf (lib.types.str // { emptyValue.value = "empty"; });
default = {};
};
}

View File

@ -0,0 +1,5 @@
{
imports = [
../declare-enable.nix
];
}

View File

@ -0,0 +1,7 @@
{
disabledModules = [
./bar.nix
];
}

View File

@ -0,0 +1,7 @@
{
disabledModules = [
./foo.nix
];
}

View File

@ -0,0 +1,5 @@
{
imports = [
../declare-enable.nix
];
}

View File

@ -0,0 +1,8 @@
{
imports = [
./foo.nix
./bar.nix
];
enable = true;
}

View File

@ -0,0 +1,11 @@
{ lib, ... }:
{
imports = [
"${builtins.toFile "drv" "{}"}"
./declare-enable.nix
./define-enable.nix
];
}

View File

@ -65,6 +65,11 @@ rec {
# definition values and locations (e.g. [ { file = "/foo.nix";
# value = 1; } { file = "/bar.nix"; value = 2 } ]).
merge ? mergeDefaultOption
, # Whether this type has a value representing nothingness. If it does,
# this should be a value of the form { value = <the nothing value>; }
# If it doesn't, this should be {}
# This may be used when a value is required for `mkIf false`. This allows the extra laziness in e.g. `lazyAttrsOf`.
emptyValue ? {}
, # Return a flat list of sub-options. Used to generate
# documentation.
getSubOptions ? prefix: {}
@ -88,7 +93,7 @@ rec {
functor ? defaultFunctor name
}:
{ _type = "option-type";
inherit name check merge getSubOptions getSubModules substSubModules typeMerge functor;
inherit name check merge emptyValue getSubOptions getSubModules substSubModules typeMerge functor;
description = if description == null then name else description;
};
@ -225,6 +230,7 @@ rec {
description = "attribute set";
check = isAttrs;
merge = loc: foldl' (res: def: mergeAttrs res def.value) {};
emptyValue = { value = {}; };
};
# derivation is a reserved keyword.
@ -265,6 +271,7 @@ rec {
) def.value
else
throw "The option value `${showOption loc}` in `${def.file}` is not a list.") defs)));
emptyValue = { value = {}; };
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["*"]);
getSubModules = elemType.getSubModules;
substSubModules = m: listOf (elemType.substSubModules m);
@ -273,7 +280,10 @@ rec {
nonEmptyListOf = elemType:
let list = addCheck (types.listOf elemType) (l: l != []);
in list // { description = "non-empty " + list.description; };
in list // {
description = "non-empty " + list.description;
# Note: emptyValue is left as is, because another module may define an element.
};
attrsOf = elemType: mkOptionType rec {
name = "attrsOf";
@ -285,12 +295,37 @@ rec {
)
# Push down position info.
(map (def: mapAttrs (n: v: { inherit (def) file; value = v; }) def.value) defs)));
emptyValue = { value = {}; };
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name>"]);
getSubModules = elemType.getSubModules;
substSubModules = m: attrsOf (elemType.substSubModules m);
functor = (defaultFunctor name) // { wrapped = elemType; };
};
# A version of attrsOf that's lazy in its values at the expense of
# conditional definitions not working properly. E.g. defining a value with
# `foo.attr = mkIf false 10`, then `foo ? attr == true`, whereas with
# attrsOf it would correctly be `false`. Accessing `foo.attr` would throw an
# error that it's not defined. Use only if conditional definitions don't make sense.
lazyAttrsOf = elemType: mkOptionType rec {
name = "lazyAttrsOf";
description = "lazy attribute set of ${elemType.description}s";
check = isAttrs;
merge = loc: defs:
zipAttrsWith (name: defs:
let merged = mergeDefinitions (loc ++ [name]) elemType defs;
# mergedValue will trigger an appropriate error when accessed
in merged.optionalValue.value or elemType.emptyValue.value or merged.mergedValue
)
# Push down position info.
(map (def: mapAttrs (n: v: { inherit (def) file; value = v; }) def.value) defs);
emptyValue = { value = {}; };
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name>"]);
getSubModules = elemType.getSubModules;
substSubModules = m: lazyAttrsOf (elemType.substSubModules m);
functor = (defaultFunctor name) // { wrapped = elemType; };
};
# List or attribute set of ...
loaOf = elemType:
let
@ -339,6 +374,7 @@ rec {
description = "list or attribute set of ${elemType.description}s";
check = x: isList x || isAttrs x;
merge = loc: defs: attrOnly.merge loc (convertAllLists loc defs);
emptyValue = { value = {}; };
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name?>"]);
getSubModules = elemType.getSubModules;
substSubModules = m: loaOf (elemType.substSubModules m);
@ -350,6 +386,7 @@ rec {
name = "uniq";
inherit (elemType) description check;
merge = mergeOneOption;
emptyValue = elemType.emptyValue;
getSubOptions = elemType.getSubOptions;
getSubModules = elemType.getSubModules;
substSubModules = m: uniq (elemType.substSubModules m);
@ -367,6 +404,7 @@ rec {
else if nrNulls != 0 then
throw "The option `${showOption loc}` is defined both null and not null, in ${showFiles (getFiles defs)}."
else elemType.merge loc defs;
emptyValue = { value = null; };
getSubOptions = elemType.getSubOptions;
getSubModules = elemType.getSubModules;
substSubModules = m: nullOr (elemType.substSubModules m);
@ -407,6 +445,7 @@ rec {
args.name = last loc;
prefix = loc;
}).config;
emptyValue = { value = {}; };
getSubOptions = prefix: (evalModules
{ inherit modules prefix specialArgs;
# This is a work-around due to the fact that some sub-modules,
@ -515,6 +554,7 @@ rec {
if finalType.check val then val
else coerceFunc val;
in finalType.merge loc (map (def: def // { value = coerceVal def.value; }) defs);
emptyValue = finalType.emptyValue;
getSubOptions = finalType.getSubOptions;
getSubModules = finalType.getSubModules;
substSubModules = m: coercedTo coercedType coerceFunc (finalType.substSubModules m);

View File

@ -1764,6 +1764,12 @@
githubId = 11946442;
name = "Dipin Hora";
};
dirkx = {
email = "dirkx@webweaving.org";
github = "dirkx";
githubId = 392583;
name = "Dirk-Willem van Gulik";
};
disassembler = {
email = "disasm@gmail.com";
github = "disassembler";
@ -1983,6 +1989,12 @@
githubId = 50854;
name = "edef";
};
emantor = {
email = "rouven+nixos@czerwinskis.de";
github = "emantor";
githubId = 934284;
name = "Rouven Czerwinski";
};
embr = {
email = "hi@liclac.eu";
github = "liclac";
@ -4580,6 +4592,12 @@
githubId = 3958340;
name = "Eshin Kunishima";
};
mildlyincompetent = {
email = "nix@kch.dev";
github = "mildlyincompetent";
githubId = 19479662;
name = "Kajetan Champlewski";
};
miltador = {
email = "miltador@yandex.ua";
name = "Vasiliy Solovey";
@ -6847,6 +6865,11 @@
githubId = 863327;
name = "Tyler Benster";
};
tckmn = {
email = "andy@tck.mn";
github = "tckmn";
name = "Andy Tockman";
};
teh = {
email = "tehunger@gmail.com";
github = "teh";
@ -7848,4 +7871,10 @@
githubId = 52650;
name = "Marc Busqué";
};
snglth = {
email = "illia@ishestakov.com";
github = "snglth";
githubId = 8686360;
name = "Illia Shestakov";
};
}

View File

@ -352,6 +352,36 @@
An attribute set where all the values are of
<replaceable>t</replaceable> type. Multiple definitions result in the
joined attribute set.
<note><para>
This type is <emphasis>strict</emphasis> in its values, which in turn
means attributes cannot depend on other attributes. See
<varname>types.lazyAttrsOf</varname> for a lazy version.
</para></note>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>types.lazyAttrsOf</varname> <replaceable>t</replaceable>
</term>
<listitem>
<para>
An attribute set where all the values are of
<replaceable>t</replaceable> type. Multiple definitions result in the
joined attribute set. This is the lazy version of
<varname>types.attrsOf</varname>, allowing attributes to depend on each other.
<warning><para>
This version does not fully support conditional definitions! With an
option <varname>foo</varname> of this type and a definition
<literal>foo.attr = lib.mkIf false 10</literal>, evaluating
<literal>foo ? attr</literal> will return <literal>true</literal>
even though it should be false. Accessing the value will then throw
an error. For types <replaceable>t</replaceable> that have an
<literal>emptyValue</literal> defined, that value will be returned
instead of throwing an error. So if the type of <literal>foo</literal>
was <literal>lazyAttrsOf (nullOr int)</literal>, <literal>foo.attr</literal>
would evaluate to <literal>null</literal> for the same <literal>mkIf false</literal> definition.
</para></warning>
</para>
</listitem>
</varlistentry>
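
The difference is easiest to see in a small module. The following sketch mirrors the test modules added in this commit; the option name `foo` is illustrative:
```nix
{ lib, ... }: {
  options.foo = lib.mkOption {
    # swap in lib.types.lazyAttrsOf lib.types.int to get the lazy behaviour
    type = lib.types.attrsOf lib.types.int;
    default = {};
  };

  # With attrsOf this conditional definition is dropped, so `config.foo ? attr`
  # is false; with lazyAttrsOf the attribute exists and accessing it throws
  # (or yields the element type's emptyValue, if one is defined).
  config.foo.attr = lib.mkIf false 10;
}
```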

View File

@ -6,8 +6,8 @@
<title>Replace Modules</title>
<para>
Modules that are imported can also be disabled. The option declarations and
config implementation of a disabled module will be ignored, allowing another
Modules that are imported can also be disabled. The option declarations,
config implementation and the imports of a disabled module will be ignored, allowing another
to take its place. This can be used to import a set of modules from another
channel while keeping the rest of the system on a stable release.
</para>
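
A rough sketch of that pattern, with placeholder paths for the disabled module and the replacement channel:
```nix
{
  # Ignore the module shipped with the stable channel; with this change its
  # imports are ignored as well, not just its option declarations and config.
  disabledModules = [ "services/web-apps/example.nix" ];

  # Import the implementation from another checkout or channel instead.
  imports = [ <nixos-unstable/nixos/modules/services/web-apps/example.nix> ];
}
```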

View File

@ -357,6 +357,40 @@ services.xserver.displayManager.defaultSession = "xfce+icewm";
<link linkend="opt-services.httpd.virtualHosts">services.httpd.virtualHosts.&lt;name&gt;.useACMEHost</link>.
</para>
</listitem>
<listitem>
<para>
For NixOS configuration options, the <literal>loaOf</literal> type has
been deprecated and will be removed in a future release. In nixpkgs,
options of this type will be changed to <literal>attrsOf</literal>
instead. If you were using one of these in your configuration, you will
see a warning suggesting what changes will be required.
</para>
<para>
For example, <link linkend="opt-users.users">users.users</link> is a
<literal>loaOf</literal> option that is commonly used as follows:
<programlisting>
users.users =
[ { name = "me";
description = "My personal user.";
isNormalUser = true;
}
];
</programlisting>
This should be rewritten by removing the list and using the
value of <literal>name</literal> as the name of the attribute set:
<programlisting>
users.users.me =
{ description = "My personal user.";
isNormalUser = true;
};
</programlisting>
</para>
<para>
For more information on this change, have a look at these links:
<link xlink:href="https://github.com/NixOS/nixpkgs/issues/1800">issue #1800</link>,
<link xlink:href="https://github.com/NixOS/nixpkgs/pull/63103">PR #63103</link>.
</para>
</listitem>
</itemizedlist>
</section>

View File

@ -1,6 +1,6 @@
#! /bin/sh -e
#! /bin/sh -eu
export NIX_PATH=nixpkgs=../../../..
export NIX_PATH=nixpkgs=$(dirname $(readlink -f $0))/../../../..
export NIXOS_CONFIG=$(dirname $(readlink -f $0))/../../../modules/virtualisation/azure-image.nix
export TIMESTAMP=$(date +%Y%m%d%H%M)

View File

@ -159,10 +159,12 @@ in
# extraGroups = [ "wheel" ]; # Enable sudo for the user.
# };
# This value determines the NixOS release with which your system is to be
# compatible, in order to avoid breaking some software such as database
# servers. You should change this only after NixOS release notes say you
# should.
# This value determines the NixOS release from which the default
# settings for stateful data, like file locations and database versions
# on your system, were taken. It's perfectly fine and recommended to leave
# this value at the release version of the first install of this system.
# Before changing this value read the documentation for this option
# (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
system.stateVersion = "${config.system.nixos.release}"; # Did you read the comment?
}

View File

@ -1,4 +1,4 @@
{ config, lib, pkgs, baseModules, extraModules, modules, ... }:
{ config, lib, pkgs, baseModules, extraModules, modules, modulesPath, ... }:
with lib;
@ -22,7 +22,10 @@ let
scrubbedEval = evalModules {
modules = [ { nixpkgs.localSystem = config.nixpkgs.localSystem; } ] ++ manualModules;
args = (config._module.args) // { modules = [ ]; };
specialArgs = { pkgs = scrubDerivations "pkgs" pkgs; };
specialArgs = {
pkgs = scrubDerivations "pkgs" pkgs;
inherit modulesPath;
};
};
scrubDerivations = namePrefix: pkgSet: mapAttrs
(name: value:

View File

@ -61,11 +61,18 @@ in
configuration defaults in a way incompatible with stateful
data. For instance, if the default version of PostgreSQL
changes, the new version will probably be unable to read your
existing databases. To prevent such breakage, you can set the
existing databases. To prevent such breakage, you should set the
value of this option to the NixOS release with which you want
to be compatible. The effect is that NixOS will option
to be compatible. The effect is that NixOS will use
defaults corresponding to the specified release (such as using
an older version of PostgreSQL).
It's perfectly fine and recommended to leave this value at the
release version of the first install of this system.
Changing this option will not upgrade your system. In fact it
is meant to stay constant exactly when you upgrade your system.
You should only bump this option if you are sure that you can
or have migrated all state on your system which is affected
by this option.
'';
};

View File

@ -806,6 +806,7 @@
./services/web-apps/gotify-server.nix
./services/web-apps/icingaweb2/icingaweb2.nix
./services/web-apps/icingaweb2/module-monitoring.nix
./services/web-apps/ihatemoney
./services/web-apps/limesurvey.nix
./services/web-apps/mattermost.nix
./services/web-apps/mediawiki.nix

View File

@ -27,6 +27,7 @@ in
environment.etc.screenrc.text = cfg.screenrc;
environment.systemPackages = [ pkgs.screen ];
security.pam.services.screen = {};
};
}

View File

@ -776,11 +776,8 @@ in
'';
# Most of these should be moved to specific modules.
cups = {};
ftp = {};
i3lock = {};
i3lock-color = {};
screen = {};
vlock = {};
xlock = {};
xscreensaver = {};

View File

@ -320,6 +320,8 @@ in
Type = if hasNotify then "notify" else "simple";
RuntimeDirectory = "mysqld";
RuntimeDirectoryMode = "0755";
Restart = "on-abort";
RestartSec = "5s";
# The last two environment variables are used for starting Galera clusters
ExecStart = "${mysql}/bin/mysqld --defaults-file=/etc/my.cnf ${mysqldOptions} $_WSREP_NEW_CLUSTER $_WSREP_START_POSITION";
ExecStartPost =

View File

@ -5,7 +5,6 @@ with lib;
let
cfg = config.services.spamassassin;
spamassassin-local-cf = pkgs.writeText "local.cf" cfg.config;
spamassassin-init-pre = pkgs.writeText "init.pre" cfg.initPreConf;
spamdEnv = pkgs.buildEnv {
name = "spamd-env";
@ -65,8 +64,9 @@ in
};
initPreConf = mkOption {
type = types.str;
type = with types; either str path;
description = "The SpamAssassin init.pre config.";
apply = val: if builtins.isPath val then val else pkgs.writeText "init.pre" val;
default =
''
#

View File

@ -11,6 +11,9 @@ let
(recursiveUpdate defaultConfig cfg.config) else cfg.config));
configFile = pkgs.runCommand "configuration.yaml" { preferLocalBuild = true; } ''
${pkgs.remarshal}/bin/json2yaml -i ${configJSON} -o $out
# Hack to support secrets, which are encoded as custom YAML objects,
# https://www.home-assistant.io/docs/configuration/secrets/
sed -i -e "s/'\!secret \(.*\)'/\!secret \1/" $out
'';
lovelaceConfigJSON = pkgs.writeText "ui-lovelace.json"
@ -98,6 +101,10 @@ in {
{
homeassistant = {
name = "Home";
latitude = "!secret latitude";
longitude = "!secret longitude";
elevation = "!secret elevation";
unit_system = "metric";
time_zone = "UTC";
};
frontend = { };
@ -108,6 +115,8 @@ in {
description = ''
Your <filename>configuration.yaml</filename> as a Nix attribute set.
Beware that setting this option will delete your previous <filename>configuration.yaml</filename>.
<link xlink:href="https://www.home-assistant.io/docs/configuration/secrets/">Secrets</link>
are encoded as strings as shown in the example.
'';
};

View File

@ -657,8 +657,7 @@ in {
};
config = mkIf cfg.enable {
users.users.matrix-synapse =
{ name = "";
users.users.matrix-synapse = {
group = "matrix-synapse";
home = cfg.dataDir;
createHome = true;

View File

@ -42,6 +42,7 @@ in
services.gnunet = {
enable = mkOption {
type = types.bool;
default = false;
description = ''
Whether to run the GNUnet daemon. GNUnet is GNU's anonymous
@ -51,6 +52,7 @@ in
fileSharing = {
quota = mkOption {
type = types.int;
default = 1024;
description = ''
Maximum file system usage (in MiB) for file sharing.
@ -60,6 +62,7 @@ in
udp = {
port = mkOption {
type = types.port;
default = 2086; # assigned by IANA
description = ''
The UDP port for use by GNUnet.
@ -69,6 +72,7 @@ in
tcp = {
port = mkOption {
type = types.port;
default = 2086; # assigned by IANA
description = ''
The TCP port for use by GNUnet.
@ -78,6 +82,7 @@ in
load = {
maxNetDownBandwidth = mkOption {
type = types.int;
default = 50000;
description = ''
Maximum bandwidth usage (in bits per second) for GNUnet
@ -86,6 +91,7 @@ in
};
maxNetUpBandwidth = mkOption {
type = types.int;
default = 50000;
description = ''
Maximum bandwidth usage (in bits per second) for GNUnet
@ -94,6 +100,7 @@ in
};
hardNetUpBandwidth = mkOption {
type = types.int;
default = 0;
description = ''
Hard bandwidth limit (in bits per second) when uploading
@ -111,6 +118,7 @@ in
};
extraOptions = mkOption {
type = types.lines;
default = "";
description = ''
Additional options that will be copied verbatim in `gnunet.conf'.

View File

@ -104,7 +104,6 @@ in {
users.groups.mxisd =
{
name = "";
gid = config.ids.gids.mxisd;
};

View File

@ -25,6 +25,7 @@ in
services.tor.tsocks = {
enable = mkOption {
type = types.bool;
default = false;
description = ''
Whether to build tsocks wrapper script to relay application traffic via Tor.
@ -40,6 +41,7 @@ in
};
server = mkOption {
type = types.str;
default = "localhost:9050";
example = "192.168.0.20";
description = ''
@ -48,6 +50,7 @@ in
};
config = mkOption {
type = types.lines;
default = "";
description = ''
Extra configuration. Contents will be added verbatim to TSocks

View File

@ -0,0 +1,141 @@
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.services.ihatemoney;
user = "ihatemoney";
group = "ihatemoney";
db = "ihatemoney";
python3 = config.services.uwsgi.package.python3;
pkg = python3.pkgs.ihatemoney;
toBool = x: if x then "True" else "False";
configFile = pkgs.writeText "ihatemoney.cfg" ''
from secrets import token_hex
# load a persistent secret key
SECRET_KEY_FILE = "/var/lib/ihatemoney/secret_key"
SECRET_KEY = ""
try:
with open(SECRET_KEY_FILE) as f:
SECRET_KEY = f.read()
except FileNotFoundError:
pass
if not SECRET_KEY:
print("ihatemoney: generating a new secret key")
SECRET_KEY = token_hex(50)
with open(SECRET_KEY_FILE, "w") as f:
f.write(SECRET_KEY)
del token_hex
del SECRET_KEY_FILE
# "normal" configuration
DEBUG = False
SQLALCHEMY_DATABASE_URI = '${
if cfg.backend == "sqlite"
then "sqlite:////var/lib/ihatemoney/ihatemoney.sqlite"
else "postgresql:///${db}"}'
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_DEFAULT_SENDER = ("${cfg.defaultSender.name}", "${cfg.defaultSender.email}")
ACTIVATE_DEMO_PROJECT = ${toBool cfg.enableDemoProject}
ADMIN_PASSWORD = "${toString cfg.adminHashedPassword /*toString null == ""*/}"
ALLOW_PUBLIC_PROJECT_CREATION = ${toBool cfg.enablePublicProjectCreation}
ACTIVATE_ADMIN_DASHBOARD = ${toBool cfg.enableAdminDashboard}
${cfg.extraConfig}
'';
in
{
options.services.ihatemoney = {
enable = mkEnableOption "ihatemoney webapp. Note that this will set uwsgi to emperor mode running as root";
backend = mkOption {
type = types.enum [ "sqlite" "postgresql" ];
default = "sqlite";
description = ''
The database engine to use for ihatemoney.
If <literal>postgresql</literal> is selected, then a database called
<literal>${db}</literal> will be created. If you disable this option,
however, the database will not be removed.
'';
};
adminHashedPassword = mkOption {
type = types.nullOr types.str;
default = null;
description = "The hashed password of the administrator. To obtain it, run <literal>ihatemoney generate_password_hash</literal>";
};
uwsgiConfig = mkOption {
type = types.attrs;
example = {
http = ":8000";
};
description = "Additionnal configuration of the UWSGI vassal running ihatemoney. It should notably specify on which interfaces and ports the vassal should listen.";
};
defaultSender = {
name = mkOption {
type = types.str;
default = "Budget manager";
description = "The display name of the sender of ihatemoney emails";
};
email = mkOption {
type = types.str;
default = "ihatemoney@${config.networking.hostName}";
description = "The email of the sender of ihatemoney emails";
};
};
enableDemoProject = mkEnableOption "access to the demo project in ihatemoney";
enablePublicProjectCreation = mkEnableOption "permission to create projects in ihatemoney by anyone";
enableAdminDashboard = mkEnableOption "ihatemoney admin dashboard";
extraConfig = mkOption {
type = types.str;
default = "";
description = "Extra configuration appended to ihatemoney's configuration file. It is a python file, so pay attention to indentation.";
};
};
config = mkIf cfg.enable {
services.postgresql = mkIf (cfg.backend == "postgresql") {
enable = true;
ensureDatabases = [ db ];
ensureUsers = [ {
name = user;
ensurePermissions = {
"DATABASE ${db}" = "ALL PRIVILEGES";
};
} ];
};
systemd.services.postgresql = mkIf (cfg.backend == "postgresql") {
wantedBy = [ "uwsgi.service" ];
before = [ "uwsgi.service" ];
};
systemd.tmpfiles.rules = [
"d /var/lib/ihatemoney 770 ${user} ${group}"
];
users = {
users.${user} = {
isSystemUser = true;
inherit group;
};
groups.${group} = {};
};
services.uwsgi = {
enable = true;
plugins = [ "python3" ];
# the vassal needs to be able to setuid
user = "root";
group = "root";
instance = {
type = "emperor";
vassals.ihatemoney = {
type = "normal";
strict = true;
uid = user;
gid = group;
# apparently flask uses threads: https://github.com/spiral-project/ihatemoney/commit/c7815e48781b6d3a457eaff1808d179402558f8c
enable-threads = true;
module = "wsgi:application";
chdir = "${pkg}/${pkg.pythonModule.sitePackages}/ihatemoney";
env = [ "IHATEMONEY_SETTINGS_FILE_PATH=${configFile}" ];
pythonPackages = self: [ self.ihatemoney ];
} // cfg.uwsgiConfig;
};
};
};
}
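
A minimal sketch of enabling the module in a host configuration, using the options declared above (the listen address is an arbitrary example):
```nix
{
  services.ihatemoney = {
    enable = true;
    backend = "postgresql";
    # forwarded verbatim to the uWSGI vassal; pick the listen address you need
    uwsgiConfig.http = ":8000";
  };
}
```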

View File

@ -5,10 +5,6 @@ with lib;
let
cfg = config.services.uwsgi;
uwsgi = pkgs.uwsgi.override {
plugins = cfg.plugins;
};
buildCfg = name: c:
let
plugins =
@ -23,8 +19,8 @@ let
python =
if hasPython2 && hasPython3 then
throw "`plugins` attribute in UWSGI configuration shouldn't contain both python2 and python3"
else if hasPython2 then uwsgi.python2
else if hasPython3 then uwsgi.python3
else if hasPython2 then cfg.package.python2
else if hasPython3 then cfg.package.python3
else null;
pythonEnv = python.withPackages (c.pythonPackages or (self: []));
@ -77,6 +73,11 @@ in {
description = "Where uWSGI communication sockets can live";
};
package = mkOption {
type = types.package;
internal = true;
};
instance = mkOption {
type = types.attrs;
default = {
@ -138,7 +139,7 @@ in {
'';
serviceConfig = {
Type = "notify";
ExecStart = "${uwsgi}/bin/uwsgi --uid ${cfg.user} --gid ${cfg.group} --json ${buildCfg "server" cfg.instance}/server.json";
ExecStart = "${cfg.package}/bin/uwsgi --uid ${cfg.user} --gid ${cfg.group} --json ${buildCfg "server" cfg.instance}/server.json";
ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID";
ExecStop = "${pkgs.coreutils}/bin/kill -INT $MAINPID";
NotifyAccess = "main";
@ -156,5 +157,9 @@ in {
users.groups = optionalAttrs (cfg.group == "uwsgi") {
uwsgi.gid = config.ids.gids.uwsgi;
};
services.uwsgi.package = pkgs.uwsgi.override {
inherit (cfg) plugins;
};
};
}

View File

@ -171,7 +171,7 @@ in
"L+ /run/gdm/.config/pulse - - - - ${pulseConfig}"
] ++ optionals config.services.gnome3.gnome-initial-setup.enable [
# Create stamp file for gnome-initial-setup to prevent it starting in GDM.
"f /run/gdm/.config/gnome-initial-setup-done 0711 gdm gdm yes"
"f /run/gdm/.config/gnome-initial-setup-done 0711 gdm gdm - yes"
];
systemd.services.display-manager.wants = [

View File

@ -15,6 +15,7 @@ in {
systemd.services = {
powertop = {
wantedBy = [ "multi-user.target" ];
after = [ "multi-user.target" ];
description = "Powertop tunings";
path = [ pkgs.kmod ];
serviceConfig = {

View File

@ -122,6 +122,7 @@ in
i3wm = handleTest ./i3wm.nix {};
icingaweb2 = handleTest ./icingaweb2.nix {};
iftop = handleTest ./iftop.nix {};
ihatemoney = handleTest ./ihatemoney.nix {};
incron = handleTest ./incron.nix {};
influxdb = handleTest ./influxdb.nix {};
initrd-network-ssh = handleTest ./initrd-network-ssh {};

View File

@ -19,6 +19,12 @@ let
key = "AQBEEJNac00kExAAXEgy943BGyOpVH1LLlHafQ==";
uuid = "5e97a838-85b6-43b0-8950-cb56d554d1e5";
};
osd2 = {
name = "2";
ip = "192.168.1.4";
key = "AQAdyhZeIaUlARAAGRoidDAmS6Vkp546UFEf5w==";
uuid = "ea999274-13d0-4dd5-9af9-ad25a324f72f";
};
};
generateCephConfig = { daemonConfig }: {
enable = true;
@ -72,35 +78,20 @@ let
};
}; };
networkOsd0 = {
networkOsd = osd: {
dhcpcd.enable = false;
interfaces.eth1.ipv4.addresses = pkgs.lib.mkOverride 0 [
{ address = cfg.osd0.ip; prefixLength = 24; }
{ address = osd.ip; prefixLength = 24; }
];
firewall = {
allowedTCPPortRanges = [ { from = 6800; to = 7300; } ];
};
};
cephConfigOsd0 = generateCephConfig { daemonConfig = {
osd = {
enable = true;
daemons = [ cfg.osd0.name ];
};
}; };
networkOsd1 = {
dhcpcd.enable = false;
interfaces.eth1.ipv4.addresses = pkgs.lib.mkOverride 0 [
{ address = cfg.osd1.ip; prefixLength = 24; }
];
firewall = {
allowedTCPPortRanges = [ { from = 6800; to = 7300; } ];
};
};
cephConfigOsd1 = generateCephConfig { daemonConfig = {
cephConfigOsd = osd: generateCephConfig { daemonConfig = {
osd = {
enable = true;
daemons = [ cfg.osd1.name ];
daemons = [ osd.name ];
};
}; };
@ -114,6 +105,7 @@ let
monA.wait_for_unit("network.target")
osd0.wait_for_unit("network.target")
osd1.wait_for_unit("network.target")
osd2.wait_for_unit("network.target")
# Bootstrap ceph-mon daemon
monA.succeed(
@ -145,8 +137,9 @@ let
monA.succeed("cp /etc/ceph/ceph.client.admin.keyring /tmp/shared")
osd0.succeed("cp /tmp/shared/ceph.client.admin.keyring /etc/ceph")
osd1.succeed("cp /tmp/shared/ceph.client.admin.keyring /etc/ceph")
osd2.succeed("cp /tmp/shared/ceph.client.admin.keyring /etc/ceph")
# Bootstrap both OSDs
# Bootstrap OSDs
osd0.succeed(
"mkfs.xfs /dev/vdb",
"mkdir -p /var/lib/ceph/osd/ceph-${cfg.osd0.name}",
@ -161,6 +154,13 @@ let
"ceph-authtool --create-keyring /var/lib/ceph/osd/ceph-${cfg.osd1.name}/keyring --name osd.${cfg.osd1.name} --add-key ${cfg.osd1.key}",
'echo \'{"cephx_secret": "${cfg.osd1.key}"}\' | ceph osd new ${cfg.osd1.uuid} -i -',
)
osd2.succeed(
"mkfs.xfs /dev/vdb",
"mkdir -p /var/lib/ceph/osd/ceph-${cfg.osd2.name}",
"mount /dev/vdb /var/lib/ceph/osd/ceph-${cfg.osd2.name}",
"ceph-authtool --create-keyring /var/lib/ceph/osd/ceph-${cfg.osd2.name}/keyring --name osd.${cfg.osd2.name} --add-key ${cfg.osd2.key}",
'echo \'{"cephx_secret": "${cfg.osd2.key}"}\' | ceph osd new ${cfg.osd2.uuid} -i -',
)
# Initialize the OSDs with regular filestore
osd0.succeed(
@ -173,7 +173,12 @@ let
"chown -R ceph:ceph /var/lib/ceph/osd",
"systemctl start ceph-osd-${cfg.osd1.name}",
)
monA.wait_until_succeeds("ceph osd stat | grep -e '2 osds: 2 up[^,]*, 2 in'")
osd2.succeed(
"ceph-osd -i ${cfg.osd2.name} --mkfs --osd-uuid ${cfg.osd2.uuid}",
"chown -R ceph:ceph /var/lib/ceph/osd",
"systemctl start ceph-osd-${cfg.osd2.name}",
)
monA.wait_until_succeeds("ceph osd stat | grep -e '3 osds: 3 up[^,]*, 3 in'")
monA.wait_until_succeeds("ceph -s | grep 'mgr: ${cfg.monA.name}(active,'")
monA.wait_until_succeeds("ceph -s | grep 'HEALTH_OK'")
@ -196,16 +201,18 @@ let
monA.crash()
osd0.crash()
osd1.crash()
osd2.crash()
# Start it up
osd0.start()
osd1.start()
osd2.start()
monA.start()
# Ensure the cluster comes back up again
monA.succeed("ceph -s | grep 'mon: 1 daemons'")
monA.wait_until_succeeds("ceph -s | grep 'quorum ${cfg.monA.name}'")
monA.wait_until_succeeds("ceph osd stat | grep -e '2 osds: 2 up[^,]*, 2 in'")
monA.wait_until_succeeds("ceph osd stat | grep -e '3 osds: 3 up[^,]*, 3 in'")
monA.wait_until_succeeds("ceph -s | grep 'mgr: ${cfg.monA.name}(active,'")
monA.wait_until_succeeds("ceph -s | grep 'HEALTH_OK'")
'';
@ -217,8 +224,9 @@ in {
nodes = {
monA = generateHost { pkgs = pkgs; cephConfig = cephConfigMonA; networkConfig = networkMonA; };
osd0 = generateHost { pkgs = pkgs; cephConfig = cephConfigOsd0; networkConfig = networkOsd0; };
osd1 = generateHost { pkgs = pkgs; cephConfig = cephConfigOsd1; networkConfig = networkOsd1; };
osd0 = generateHost { pkgs = pkgs; cephConfig = cephConfigOsd cfg.osd0; networkConfig = networkOsd cfg.osd0; };
osd1 = generateHost { pkgs = pkgs; cephConfig = cephConfigOsd cfg.osd1; networkConfig = networkOsd cfg.osd1; };
osd2 = generateHost { pkgs = pkgs; cephConfig = cephConfigOsd cfg.osd2; networkConfig = networkOsd cfg.osd2; };
};
testScript = testscript;

View File

@ -17,6 +17,11 @@ let
key = "AQBEEJNac00kExAAXEgy943BGyOpVH1LLlHafQ==";
uuid = "5e97a838-85b6-43b0-8950-cb56d554d1e5";
};
osd2 = {
name = "2";
key = "AQAdyhZeIaUlARAAGRoidDAmS6Vkp546UFEf5w==";
uuid = "ea999274-13d0-4dd5-9af9-ad25a324f72f";
};
};
generateCephConfig = { daemonConfig }: {
enable = true;
@ -30,7 +35,7 @@ let
generateHost = { pkgs, cephConfig, networkConfig, ... }: {
virtualisation = {
memorySize = 512;
emptyDiskImages = [ 20480 20480 ];
emptyDiskImages = [ 20480 20480 20480 ];
vlans = [ 1 ];
};
@ -65,7 +70,7 @@ let
};
osd = {
enable = true;
daemons = [ cfg.osd0.name cfg.osd1.name ];
daemons = [ cfg.osd0.name cfg.osd1.name cfg.osd2.name ];
};
}; };
@ -104,29 +109,36 @@ let
monA.wait_until_succeeds("ceph -s | grep 'quorum ${cfg.monA.name}'")
monA.wait_until_succeeds("ceph -s | grep 'mgr: ${cfg.monA.name}(active,'")
# Bootstrap both OSDs
# Bootstrap OSDs
monA.succeed(
"mkfs.xfs /dev/vdb",
"mkfs.xfs /dev/vdc",
"mkfs.xfs /dev/vdd",
"mkdir -p /var/lib/ceph/osd/ceph-${cfg.osd0.name}",
"mount /dev/vdb /var/lib/ceph/osd/ceph-${cfg.osd0.name}",
"mkdir -p /var/lib/ceph/osd/ceph-${cfg.osd1.name}",
"mount /dev/vdc /var/lib/ceph/osd/ceph-${cfg.osd1.name}",
"mkdir -p /var/lib/ceph/osd/ceph-${cfg.osd2.name}",
"mount /dev/vdd /var/lib/ceph/osd/ceph-${cfg.osd2.name}",
"ceph-authtool --create-keyring /var/lib/ceph/osd/ceph-${cfg.osd0.name}/keyring --name osd.${cfg.osd0.name} --add-key ${cfg.osd0.key}",
"ceph-authtool --create-keyring /var/lib/ceph/osd/ceph-${cfg.osd1.name}/keyring --name osd.${cfg.osd1.name} --add-key ${cfg.osd1.key}",
"ceph-authtool --create-keyring /var/lib/ceph/osd/ceph-${cfg.osd2.name}/keyring --name osd.${cfg.osd2.name} --add-key ${cfg.osd2.key}",
'echo \'{"cephx_secret": "${cfg.osd0.key}"}\' | ceph osd new ${cfg.osd0.uuid} -i -',
'echo \'{"cephx_secret": "${cfg.osd1.key}"}\' | ceph osd new ${cfg.osd1.uuid} -i -',
'echo \'{"cephx_secret": "${cfg.osd2.key}"}\' | ceph osd new ${cfg.osd2.uuid} -i -',
)
# Initialize the OSDs with regular filestore
monA.succeed(
"ceph-osd -i ${cfg.osd0.name} --mkfs --osd-uuid ${cfg.osd0.uuid}",
"ceph-osd -i ${cfg.osd1.name} --mkfs --osd-uuid ${cfg.osd1.uuid}",
"ceph-osd -i ${cfg.osd2.name} --mkfs --osd-uuid ${cfg.osd2.uuid}",
"chown -R ceph:ceph /var/lib/ceph/osd",
"systemctl start ceph-osd-${cfg.osd0.name}",
"systemctl start ceph-osd-${cfg.osd1.name}",
"systemctl start ceph-osd-${cfg.osd2.name}",
)
monA.wait_until_succeeds("ceph osd stat | grep -e '2 osds: 2 up[^,]*, 2 in'")
monA.wait_until_succeeds("ceph osd stat | grep -e '3 osds: 3 up[^,]*, 3 in'")
monA.wait_until_succeeds("ceph -s | grep 'mgr: ${cfg.monA.name}(active,'")
monA.wait_until_succeeds("ceph -s | grep 'HEALTH_OK'")
@ -161,11 +173,12 @@ let
monA.wait_for_unit("ceph-mgr-${cfg.monA.name}")
monA.wait_for_unit("ceph-osd-${cfg.osd0.name}")
monA.wait_for_unit("ceph-osd-${cfg.osd1.name}")
monA.wait_for_unit("ceph-osd-${cfg.osd2.name}")
# Ensure the cluster comes back up again
monA.succeed("ceph -s | grep 'mon: 1 daemons'")
monA.wait_until_succeeds("ceph -s | grep 'quorum ${cfg.monA.name}'")
monA.wait_until_succeeds("ceph osd stat | grep -e '2 osds: 2 up[^,]*, 2 in'")
monA.wait_until_succeeds("ceph osd stat | grep -e '3 osds: 3 up[^,]*, 3 in'")
monA.wait_until_succeeds("ceph -s | grep 'mgr: ${cfg.monA.name}(active,'")
monA.wait_until_succeeds("ceph -s | grep 'HEALTH_OK'")
'';

View File

@ -4,6 +4,7 @@
{ isNormalUser = true;
description = "Alice Foobar";
password = "foobar";
uid = 1000;
};
users.users.bob =

View File

@ -6,20 +6,12 @@
# NIXPKGS_ALLOW_UNFREE=1 nix-build nixos/tests/elk.nix -A ELK-6 --arg enableUnfree true
}:
with import ../lib/testing.nix { inherit system pkgs; };
with pkgs.lib;
let
esUrl = "http://localhost:9200";
totalHits = message :
"curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' " +
''-d '{\"query\" : { \"match\" : { \"message\" : \"${message}\"}}}' '' +
"| jq .hits.total";
mkElkTest = name : elk :
let elasticsearchGe7 = builtins.compareVersions elk.elasticsearch.version "7" >= 0;
in makeTest {
in import ./make-test-python.nix ({
inherit name;
meta = with pkgs.stdenv.lib.maintainers; {
maintainers = [ eelco offline basvandijk ];
@ -50,15 +42,15 @@ let
elk.journalbeat.version "6" < 0; in {
enable = true;
package = elk.journalbeat;
extraConfig = mkOptionDefault (''
extraConfig = pkgs.lib.mkOptionDefault (''
logging:
to_syslog: true
level: warning
metrics.enabled: false
output.elasticsearch:
hosts: [ "127.0.0.1:9200" ]
${optionalString lt6 "template.enabled: false"}
'' + optionalString (!lt6) ''
${pkgs.lib.optionalString lt6 "template.enabled: false"}
'' + pkgs.lib.optionalString (!lt6) ''
journalbeat.inputs:
- paths: []
seek: cursor
@ -130,11 +122,23 @@ let
};
testScript = ''
startAll;
import json
# Wait until elasticsearch is listening for connections.
$one->waitForUnit("elasticsearch.service");
$one->waitForOpenPort(9200);
def total_hits(message):
dictionary = {"query": {"match": {"message": message}}}
return (
"curl --silent --show-error '${esUrl}/_search' "
+ "-H 'Content-Type: application/json' "
+ "-d '{}' ".format(json.dumps(dictionary))
+ "| jq .hits.total"
)
start_all()
one.wait_for_unit("elasticsearch.service")
one.wait_for_open_port(9200)
# Continue as long as the status is not "red". The status is probably
# "yellow" instead of "green" because we are using a single elasticsearch
@ -142,42 +146,43 @@ let
#
# TODO: extend this test with multiple elasticsearch nodes
# and see if the status turns "green".
$one->waitUntilSucceeds(
"curl --silent --show-error '${esUrl}/_cluster/health' " .
"| jq .status | grep -v red");
one.wait_until_succeeds(
"curl --silent --show-error '${esUrl}/_cluster/health' | jq .status | grep -v red"
)
# Perform some simple logstash tests.
$one->waitForUnit("logstash.service");
$one->waitUntilSucceeds("cat /tmp/logstash.out | grep flowers");
$one->waitUntilSucceeds("cat /tmp/logstash.out | grep -v dragons");
with subtest("Perform some simple logstash tests"):
one.wait_for_unit("logstash.service")
one.wait_until_succeeds("cat /tmp/logstash.out | grep flowers")
one.wait_until_succeeds("cat /tmp/logstash.out | grep -v dragons")
# See if kibana is healthy.
$one->waitForUnit("kibana.service");
$one->waitUntilSucceeds(
"curl --silent --show-error 'http://localhost:5601/api/status' " .
"| jq .status.overall.state | grep green");
with subtest("Kibana is healthy"):
one.wait_for_unit("kibana.service")
one.wait_until_succeeds(
"curl --silent --show-error 'http://localhost:5601/api/status' | jq .status.overall.state | grep green"
)
# See if logstash messages arive in elasticsearch.
$one->waitUntilSucceeds("${totalHits "flowers"} | grep -v 0");
$one->waitUntilSucceeds("${totalHits "dragons"} | grep 0");
with subtest("Logstash messages arive in elasticsearch"):
one.wait_until_succeeds(total_hits("flowers") + " | grep -v 0")
one.wait_until_succeeds(total_hits("dragons") + " | grep 0")
# Test if a message logged to the journal
# is ingested by elasticsearch via journalbeat.
$one->waitForUnit("journalbeat.service");
$one->execute("echo 'Supercalifragilisticexpialidocious' | systemd-cat");
$one->waitUntilSucceeds(
"${totalHits "Supercalifragilisticexpialidocious"} | grep -v 0");
'' + optionalString (!elasticsearchGe7) ''
# Test elasticsearch-curator.
$one->systemctl("stop logstash");
$one->systemctl("start elasticsearch-curator");
$one->waitUntilSucceeds(
"! curl --silent --show-error '${esUrl}/_cat/indices' " .
"| grep logstash | grep -q ^$1");
with subtest(
"A message logged to the journal is ingested by elasticsearch via journalbeat"
):
one.wait_for_unit("journalbeat.service")
one.execute("echo 'Supercalifragilisticexpialidocious' | systemd-cat")
one.wait_until_succeeds(
total_hits("Supercalifragilisticexpialidocious") + " | grep -v 0"
)
'' + pkgs.lib.optionalString (!elasticsearchGe7) ''
with subtest("Elasticsearch-curator works"):
one.systemctl("stop logstash")
one.systemctl("start elasticsearch-curator")
one.wait_until_succeeds(
'! curl --silent --show-error "${esUrl}/_cat/indices" | grep logstash | grep -q ^'
)
'';
};
in mapAttrs mkElkTest {
}) {};
in pkgs.lib.mapAttrs mkElkTest {
ELK-6 =
if enableUnfree
then {

View File

@ -1,41 +1,79 @@
import ./make-test.nix ({ pkgs, ...} : {
import ./make-test-python.nix ({ pkgs, ...} : {
name = "gnome3-xorg";
meta = with pkgs.stdenv.lib.maintainers; {
maintainers = pkgs.gnome3.maintainers;
};
machine =
{ ... }:
machine = { nodes, ... }: let
user = nodes.machine.config.users.users.alice;
in
{ imports = [ ./common/user-account.nix ];
services.xserver.enable = true;
services.xserver.displayManager.gdm.enable = false;
services.xserver.displayManager.lightdm.enable = true;
services.xserver.displayManager.lightdm.autoLogin.enable = true;
services.xserver.displayManager.lightdm.autoLogin.user = "alice";
services.xserver.displayManager.gdm = {
enable = true;
autoLogin = {
enable = true;
user = user.name;
};
};
services.xserver.desktopManager.gnome3.enable = true;
services.xserver.displayManager.defaultSession = "gnome-xorg";
virtualisation.memorySize = 1024;
};
testScript =
''
$machine->waitForX;
testScript = { nodes, ... }: let
user = nodes.machine.config.users.users.alice;
uid = toString user.uid;
bus = "DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/${uid}/bus";
xauthority = "/run/user/${uid}/gdm/Xauthority";
display = "DISPLAY=:0.0";
env = "${bus} XAUTHORITY=${xauthority} ${display}";
gdbus = "${env} gdbus";
su = command: "su - ${user.name} -c '${env} ${command}'";
# wait for alice to be logged in
$machine->waitForUnit("default.target","alice");
# Call javascript in gnome shell, returns a tuple (success, output), where
# `success` is true if the dbus call was successful and output is what the
# javascript evaluates to.
eval = "call --session -d org.gnome.Shell -o /org/gnome/Shell -m org.gnome.Shell.Eval";
# Check that logging in has given the user ownership of devices.
$machine->succeed("getfacl -p /dev/snd/timer | grep -q alice");
# False when startup is done
startingUp = su "${gdbus} ${eval} Main.layoutManager._startingUp";
$machine->succeed("su - alice -c 'DISPLAY=:0.0 gnome-terminal &'");
$machine->succeed("xauth merge ~alice/.Xauthority");
$machine->waitForWindow(qr/alice.*machine/);
$machine->succeed("timeout 900 bash -c 'while read msg; do if [[ \$msg =~ \"GNOME Shell started\" ]]; then break; fi; done < <(journalctl -f)'");
$machine->sleep(10);
$machine->screenshot("screen");
# Start gnome-terminal
gnomeTerminalCommand = su "gnome-terminal";
# Hopefully gnome-terminal's wm class
wmClass = su "${gdbus} ${eval} global.display.focus_window.wm_class";
in ''
with subtest("Login to GNOME Xorg with GDM"):
machine.wait_for_x()
# Wait for alice to be logged in
machine.wait_for_unit("default.target", "${user.name}")
machine.wait_for_file("${xauthority}")
machine.succeed("xauth merge ${xauthority}")
# Check that logging in has given the user ownership of devices
assert "alice" in machine.succeed("getfacl -p /dev/snd/timer")
with subtest("Wait for GNOME Shell"):
# correct output should be (true, 'false')
machine.wait_until_succeeds(
"${startingUp} | grep -q 'true,..false'"
)
with subtest("Open Gnome Terminal"):
machine.succeed(
"${gnomeTerminalCommand}"
)
# correct output should be (true, '"Gnome-terminal"')
machine.wait_until_succeeds(
"${wmClass} | grep -q 'true,...Gnome-terminal'"
)
machine.sleep(20)
machine.screenshot("screen")
'';
})
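
The test above drives GNOME Shell through the org.gnome.Shell.Eval D-Bus method. A minimal fragment (not part of this change) showing the same probe with the session-bus path spelled out; the node name and the uid of 1000 for alice are assumptions:

{
  # Hypothetical fragment; assumes alice has uid 1000 and GNOME Shell is already running.
  testScript = ''
    machine.succeed(
        "su - alice -c 'DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/1000/bus "
        + "gdbus call --session -d org.gnome.Shell -o /org/gnome/Shell "
        + "-m org.gnome.Shell.Eval Main.layoutManager._startingUp'"
    )
  '';
}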

View File

@ -1,11 +1,10 @@
import ./make-test.nix ({ pkgs, ... }:
import ./make-test-python.nix ({ pkgs, ... }:
let
configDir = "/var/lib/foobar";
apiPassword = "some_secret";
mqttPassword = "another_secret";
hassCli = "hass-cli --server http://hass:8123 --password '${apiPassword}'";
in {
name = "home-assistant";
meta = with pkgs.stdenv.lib; {
@ -69,36 +68,44 @@ in {
};
testScript = ''
startAll;
$hass->waitForUnit("home-assistant.service");
start_all()
hass.wait_for_unit("home-assistant.service")
with subtest("Check that YAML configuration file is in place"):
hass.succeed("test -L ${configDir}/configuration.yaml")
with subtest("lovelace config is copied because lovelaceConfigWritable = true"):
hass.succeed("test -f ${configDir}/ui-lovelace.yaml")
with subtest("Check that Home Assistant's web interface and API can be reached"):
hass.wait_for_open_port(8123)
hass.succeed("curl --fail http://localhost:8123/states")
assert "API running" in hass.succeed(
"curl --fail -H 'x-ha-access: ${apiPassword}' http://localhost:8123/api/"
)
with subtest("Toggle a binary sensor using MQTT"):
assert '"state": "off"' in hass.succeed(
"curl http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor -H 'x-ha-access: ${apiPassword}'"
)
hass.wait_until_succeeds(
"mosquitto_pub -V mqttv311 -t home-assistant/test -u homeassistant -P '${mqttPassword}' -m let_there_be_light"
)
assert '"state": "on"' in hass.succeed(
"curl http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor -H 'x-ha-access: ${apiPassword}'"
)
with subtest("Toggle a binary sensor using hass-cli"):
assert '"state": "on"' in hass.succeed(
"${hassCli} --output json state get binary_sensor.mqtt_binary_sensor"
)
hass.succeed(
"${hassCli} state edit binary_sensor.mqtt_binary_sensor --json='{\"state\": \"off\"}'"
)
assert '"state": "off"' in hass.succeed(
"curl http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor -H 'x-ha-access: ${apiPassword}'"
)
with subtest("Print log to ease debugging"):
output_log = hass.succeed("cat ${configDir}/home-assistant.log")
print("\n### home-assistant.log ###\n")
print(output_log + "\n")
# The config is specified using a Nix attribute set,
# converted from JSON to YAML, and linked to the config dir
$hass->succeed("test -L ${configDir}/configuration.yaml");
# The lovelace config is copied because lovelaceConfigWritable = true
$hass->succeed("test -f ${configDir}/ui-lovelace.yaml");
# Check that Home Assistant's web interface and API can be reached
$hass->waitForOpenPort(8123);
$hass->succeed("curl --fail http://localhost:8123/states");
$hass->succeed("curl --fail -H 'x-ha-access: ${apiPassword}' http://localhost:8123/api/ | grep -qF 'API running'");
# Toggle a binary sensor using MQTT
$hass->succeed("curl http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor -H 'x-ha-access: ${apiPassword}' | grep -qF '\"state\": \"off\"'");
$hass->waitUntilSucceeds("mosquitto_pub -V mqttv311 -t home-assistant/test -u homeassistant -P '${mqttPassword}' -m let_there_be_light");
$hass->succeed("curl http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor -H 'x-ha-access: ${apiPassword}' | grep -qF '\"state\": \"on\"'");
# Toggle a binary sensor using hass-cli
$hass->succeed("${hassCli} --output json state get binary_sensor.mqtt_binary_sensor | grep -qF '\"state\": \"on\"'");
$hass->succeed("${hassCli} state edit binary_sensor.mqtt_binary_sensor --json='{\"state\": \"off\"}'");
$hass->succeed("curl http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor -H 'x-ha-access: ${apiPassword}' | grep -qF '\"state\": \"off\"'");
# Print log to ease debugging
my $log = $hass->succeed("cat ${configDir}/home-assistant.log");
print "\n### home-assistant.log ###\n";
print "$log\n";
# Check that no errors were logged
$hass->fail("cat ${configDir}/home-assistant.log | grep -qF ERROR");
with subtest("Check that no errors were logged"):
assert "ERROR" not in output_log
'';
})
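
Condensed, the MQTT round trip that the subtests above verify boils down to one publish and one REST read. A sketch (not part of this change), reusing the constants defined at the top of the test:

{
  testScript = ''
    # Hypothetical fragment; topic, credentials and header reuse the test's own constants.
    hass.wait_until_succeeds(
        "mosquitto_pub -V mqttv311 -t home-assistant/test "
        + "-u homeassistant -P 'another_secret' -m let_there_be_light"
    )
    assert '"state": "on"' in hass.succeed(
        "curl -H 'x-ha-access: some_secret' "
        + "http://localhost:8123/api/states/binary_sensor.mqtt_binary_sensor"
    )
  '';
}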

View File

@ -0,0 +1,52 @@
{ system ? builtins.currentSystem
, config ? {}
, pkgs ? import ../.. { inherit system config; }
}:
let
inherit (import ../lib/testing.nix { inherit system pkgs; }) makeTest;
in
map (
backend: makeTest {
name = "ihatemoney-${backend}";
machine = { lib, ... }: {
services.ihatemoney = {
enable = true;
enablePublicProjectCreation = true;
inherit backend;
uwsgiConfig = {
http = ":8000";
};
};
boot.cleanTmpDir = true;
# ihatemoney needs a local SMTP server, otherwise project creation just crashes
services.opensmtpd = {
enable = true;
serverConfiguration = ''
listen on lo
action foo relay
match from any for any action foo
'';
};
};
testScript = ''
$machine->waitForOpenPort(8000);
$machine->waitForUnit("uwsgi.service");
my $return = $machine->succeed("curl -X POST http://localhost:8000/api/projects -d 'name=yay&id=yay&password=yay&contact_email=yay\@example.com'");
die "wrong project id $return" unless "\"yay\"\n" eq $return;
my $timestamp = $machine->succeed("stat --printf %Y /var/lib/ihatemoney/secret_key");
my $owner = $machine->succeed("stat --printf %U:%G /var/lib/ihatemoney/secret_key");
die "wrong ownership for the secret key: $owner, is uwsgi running as the right user ?" unless $owner eq "ihatemoney:ihatemoney";
$machine->shutdown();
$machine->start();
$machine->waitForOpenPort(8000);
$machine->waitForUnit("uwsgi.service");
# check that the database is really persistent
print $machine->succeed("curl --basic -u yay:yay http://localhost:8000/api/projects/yay");
# check that the secret key is really persistent
my $timestamp2 = $machine->succeed("stat --printf %Y /var/lib/ihatemoney/secret_key");
die unless $timestamp eq $timestamp2;
$machine->succeed("curl http://localhost:8000 | grep ihatemoney");
'';
}
) [ "sqlite" "postgresql" ]

View File

@ -1,26 +0,0 @@
let
pkgs = import <nixpkgs> { };
in
with import <nixpkgs/nixos/lib/testing.nix> { inherit pkgs; system = builtins.currentSystem; };
with pkgs.lib;
makeTest {
name = "pg-initdb";
machine = {...}:
{
documentation.enable = false;
services.postgresql.enable = true;
services.postgresql.package = pkgs.postgresql_9_6;
environment.pathsToLink = [
"/share/postgresql"
];
};
testScript = ''
$machine->start;
$machine->succeed("sudo -u postgres initdb -D /tmp/testpostgres2");
$machine->shutdown;
'';
}

View File

@ -3,11 +3,10 @@
pkgs ? import ../.. { inherit system config; }
}:
with import ../lib/testing.nix { inherit system pkgs; };
with pkgs.lib;
let
makeKafkaTest = name: kafkaPackage: (makeTest {
makeKafkaTest = name: kafkaPackage: (import ./make-test-python.nix ({
inherit name;
meta = with pkgs.stdenv.lib.maintainers; {
maintainers = [ nequissimus ];
@ -45,24 +44,40 @@ let
};
testScript = ''
startAll;
start_all()
$zookeeper1->waitForUnit("default.target");
$zookeeper1->waitForUnit("zookeeper.service");
$zookeeper1->waitForOpenPort(2181);
zookeeper1.wait_for_unit("default.target")
zookeeper1.wait_for_unit("zookeeper.service")
zookeeper1.wait_for_open_port(2181)
$kafka->waitForUnit("default.target");
$kafka->waitForUnit("apache-kafka.service");
$kafka->waitForOpenPort(9092);
kafka.wait_for_unit("default.target")
kafka.wait_for_unit("apache-kafka.service")
kafka.wait_for_open_port(9092)
$kafka->waitUntilSucceeds("${kafkaPackage}/bin/kafka-topics.sh --create --zookeeper zookeeper1:2181 --partitions 1 --replication-factor 1 --topic testtopic");
$kafka->mustSucceed("echo 'test 1' | ${kafkaPackage}/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic testtopic");
kafka.wait_until_succeeds(
"${kafkaPackage}/bin/kafka-topics.sh --create "
+ "--zookeeper zookeeper1:2181 --partitions 1 "
+ "--replication-factor 1 --topic testtopic"
)
kafka.succeed(
"echo 'test 1' | "
+ "${kafkaPackage}/bin/kafka-console-producer.sh "
+ "--broker-list localhost:9092 --topic testtopic"
)
'' + (if name == "kafka_0_9" then ''
$kafka->mustSucceed("${kafkaPackage}/bin/kafka-console-consumer.sh --zookeeper zookeeper1:2181 --topic testtopic --from-beginning --max-messages 1 | grep 'test 1'");
assert "test 1" in kafka.succeed(
"${kafkaPackage}/bin/kafka-console-consumer.sh "
+ "--zookeeper zookeeper1:2181 --topic testtopic "
+ "--from-beginning --max-messages 1"
)
'' else ''
$kafka->mustSucceed("${kafkaPackage}/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic testtopic --from-beginning --max-messages 1 | grep 'test 1'");
assert "test 1" in kafka.succeed(
"${kafkaPackage}/bin/kafka-console-consumer.sh "
+ "--bootstrap-server localhost:9092 --topic testtopic "
+ "--from-beginning --max-messages 1"
)
'');
});
}) {});
in with pkgs; {
kafka_0_9 = makeKafkaTest "kafka_0_9" apacheKafka_0_9;
@ -74,4 +89,5 @@ in with pkgs; {
kafka_2_1 = makeKafkaTest "kafka_2_1" apacheKafka_2_1;
kafka_2_2 = makeKafkaTest "kafka_2_2" apacheKafka_2_2;
kafka_2_3 = makeKafkaTest "kafka_2_3" apacheKafka_2_3;
kafka_2_4 = makeKafkaTest "kafka_2_4" apacheKafka_2_4;
}

View File

@ -29,11 +29,15 @@ let
machine = {...}:
{
services.postgresql.enable = true;
services.postgresql.package = postgresql-package;
services.postgresql = {
enable = true;
package = postgresql-package;
};
services.postgresqlBackup.enable = true;
services.postgresqlBackup.databases = optional (!backup-all) "postgres";
services.postgresqlBackup = {
enable = true;
databases = optional (!backup-all) "postgres";
};
};
testScript = let
@ -49,23 +53,32 @@ let
machine.start()
machine.wait_for_unit("postgresql")
# postgresql should be available just after unit start
machine.succeed(
"cat ${test-sql} | sudo -u postgres psql"
)
machine.shutdown() # make sure that postgresql survive restart (bug #1735)
time.sleep(2)
machine.start()
machine.wait_for_unit("postgresql")
with subtest("Postgresql is available just after unit start"):
machine.succeed(
"cat ${test-sql} | sudo -u postgres psql"
)
with subtest("Postgresql survives restart (bug #1735)"):
machine.shutdown()
time.sleep(2)
machine.start()
machine.wait_for_unit("postgresql")
machine.fail(check_count("SELECT * FROM sth;", 3))
machine.succeed(check_count("SELECT * FROM sth;", 5))
machine.fail(check_count("SELECT * FROM sth;", 4))
machine.succeed(check_count("SELECT xpath('/test/text()', doc) FROM xmltest;", 1))
# Check backup service
machine.succeed("systemctl start ${backupService}.service")
machine.succeed("zcat /var/backup/postgresql/${backupName}.sql.gz | grep '<test>ok</test>'")
machine.succeed("stat -c '%a' /var/backup/postgresql/${backupName}.sql.gz | grep 600")
with subtest("Backup service works"):
machine.succeed(
"systemctl start ${backupService}.service",
"zcat /var/backup/postgresql/${backupName}.sql.gz | grep '<test>ok</test>'",
"stat -c '%a' /var/backup/postgresql/${backupName}.sql.gz | grep 600",
)
with subtest("Initdb works"):
machine.succeed("sudo -u postgres initdb -D /tmp/testpostgres2")
machine.shutdown()
'';

View File

@ -7,13 +7,13 @@
mkDerivation rec {
pname = "elisa";
version = "0.4.2";
version = "19.12.0";
src = fetchFromGitHub {
owner = "KDE";
repo = "elisa";
rev = "v${version}";
sha256 = "0q098zaajwbpkrarrsdzpjhpsq2nxkqaxwzhr2gjlg08j9vqkpfm";
sha256 = "1939xwhy1s502pai63vz56hnnsl3qsb6arhrlg5bw6bwsv88blac";
};
buildInputs = [ vlc ];

View File

@ -1,6 +1,7 @@
{ stdenv, lib, fetchFromGitHub, cmake, qt5, libuchardet, pkgconfig, makeWrapper
{ stdenv, lib, fetchFromGitHub, cmake, libuchardet, pkgconfig
, shntool, flac, opusTools, vorbis-tools, mp3gain, lame, wavpack, vorbisgain
, gtk3
, qtbase, qttools, wrapQtAppsHook
}:
stdenv.mkDerivation rec {
@ -14,8 +15,8 @@ stdenv.mkDerivation rec {
sha256 = "05pvg5xhc2azwzld08m81r4b2krqdbcbm5lmdvg2zkk67xq9pqyd";
};
nativeBuildInputs = [ cmake pkgconfig makeWrapper ];
buildInputs = [ qt5.qtbase qt5.qttools libuchardet ];
nativeBuildInputs = [ cmake pkgconfig wrapQtAppsHook ];
buildInputs = [ qtbase qttools libuchardet ];
postInstall = ''
wrapProgram $out/bin/flacon \
@ -29,6 +30,6 @@ stdenv.mkDerivation rec {
homepage = https://flacon.github.io/;
license = licenses.lgpl21;
platforms = platforms.linux;
maintainers = with maintainers; [ nico202 ];
maintainers = with maintainers; [ snglth ];
};
}

View File

@ -9,11 +9,11 @@
stdenv.mkDerivation rec {
pname = "kid3";
version = "3.8.0";
version = "3.8.1";
src = fetchurl {
url = "mirror://sourceforge/project/kid3/kid3/${version}/${pname}-${version}.tar.gz";
sha256 = "1a6ixkkdp1fl3arylx06w73mwf26i0ibyplwwcn2kw5xsfxmbjp6";
sha256 = "1d2lr500dx7gnxw2vrvpbhadpn313ly3zyp178864z26dnfkjv8x";
};
nativeBuildInputs = [ wrapQtAppsHook ];

View File

@ -46,13 +46,13 @@ let
];
in stdenv.mkDerivation rec {
pname = "pulseeffects";
version = "4.6.9";
version = "4.7.0";
src = fetchFromGitHub {
owner = "wwmm";
repo = "pulseeffects";
rev = "v${version}";
sha256 = "0ag19hvf50ip7z1s8jziy4pm8c72w7qq9zzgb4967l6v17rar4yh";
sha256 = "1cpiill24c54sy97xm1r0sqqpxj6ar40pnnwb72qs8b9zzci920r";
};
nativeBuildInputs = [

View File

@ -1,14 +1,14 @@
{ stdenv, mkDerivation, fetchurl, pkgconfig, alsaLib, libjack2, dbus, qtbase, qttools, qtx11extras }:
mkDerivation rec {
version = "0.6.0";
version = "0.6.1";
pname = "qjackctl";
# some dependencies such as killall have to be installed separately
src = fetchurl {
url = "mirror://sourceforge/qjackctl/${pname}-${version}.tar.gz";
sha256 = "1kddvxxhwvw1ps1c1drr08hxqci7jw4jwr8h1d9isb8agydfxmcx";
sha256 = "09y0s43ypcghnvzsz89mi1qs5bsvn4hpb98lqxmwwsy0ikcv3hny";
};
buildInputs = [

View File

@ -1,12 +1,12 @@
{ stdenv, fetchurl, pkgconfig, qt5, alsaLib, libjack2 }:
stdenv.mkDerivation rec {
version = "0.6.0";
version = "0.6.1";
pname = "qmidinet";
src = fetchurl {
url = "mirror://sourceforge/qmidinet/${pname}-${version}.tar.gz";
sha256 = "07hgk3a8crx262rm1fzggqarz8f1ml910vwgd32mbvlarws5cv0n";
sha256 = "1nvbvx3wg2s6s7r4x6m2pm9nx7pdz00ghw9h10wfqi2s474mwip0";
};
hardeningDisable = [ "format" ];

View File

@ -4,11 +4,11 @@
mkDerivation rec {
pname = "qsynth";
version = "0.6.0";
version = "0.6.1";
src = fetchurl {
url = "mirror://sourceforge/qsynth/${pname}-${version}.tar.gz";
sha256 = "173v0jqybi5szxxvj4n6wyg9sj54rmm6pxwhynx8wkm7nsbh0aij";
sha256 = "12jhfan81a10vbqfky5nmam3lk6d0i4654mm192v68q5r867xmcl";
};
nativeBuildInputs = [ autoconf pkgconfig ];

View File

@ -4,11 +4,11 @@
stdenv.mkDerivation rec {
pname = "setbfree";
version = "0.8.10";
version = "0.8.11";
src = fetchzip {
url = "https://github.com/pantherb/setBfree/archive/v${version}.tar.gz";
sha256 = "1hpj8qb5mhkqm4yy8mzzrrq0ljw22y807qly90vjkg61ascyina4";
sha256 = "0c2wc8nkrzsy0yic4y7hjz320m3d20r8152j9dk8nsnmgjmyr2ir";
};
postPatch = ''

View File

@ -2,12 +2,12 @@
let
pname = "ledger-live-desktop";
version = "1.19.2";
version = "1.20.0";
name = "${pname}-${version}";
src = fetchurl {
url = "https://github.com/LedgerHQ/${pname}/releases/download/v${version}/${pname}-${version}-linux-x86_64.AppImage";
sha256 = "1qgk0gwn7ijz00zfwf9rfl755lhgsig9xa6c3icj6l6wyzybz0z6";
sha256 = "09mgd5nsd65w4irgzgmfz1k0r1k4fgkq490pkil8nqy6akjrsw1z";
};
appimageContents = appimageTools.extractType2 {

View File

@ -18,9 +18,9 @@ let
sha256Hash = "0xpcihr5xxr9l1kv6aflywshs8fww3s7di0g98mz475whhxwzf3q";
};
latestVersion = { # canary & dev
version = "4.0.0.7"; # "Android Studio 4.0 Canary 7"
build = "193.6085562";
sha256Hash = "0vk1vwh2yhsmadkb3v3m042ckzizc41ckqvj3jax8p86gl0b4whj";
version = "4.0.0.8"; # "Android Studio 4.0 Canary 8"
build = "193.6107147";
sha256Hash = "0bdibjp52jjlyh0966p9657xxmz1z7vi262v6ss4ywpb7gpaj9qq";
};
in {
# Attributes are named by their corresponding release channels

View File

@ -208,10 +208,10 @@
elpaBuild {
pname = "async";
ename = "async";
version = "1.9.2";
version = "1.9.3";
src = fetchurl {
url = "https://elpa.gnu.org/packages/async-1.9.2.tar";
sha256 = "17fnvrj7jww29sav6a6jpizclg4w2962m6h37akpii71gf0vrffw";
url = "https://elpa.gnu.org/packages/async-1.9.3.tar";
sha256 = "1pmfjrlapvhkjqcal8x95w190hm9wsgxb3byc22rc1gf5z0p52c8";
};
packageRequires = [ cl-lib nadvice ];
meta = {
@ -223,10 +223,10 @@
elpaBuild {
pname = "auctex";
ename = "auctex";
version = "12.1.2";
version = "12.2.0";
src = fetchurl {
url = "https://elpa.gnu.org/packages/auctex-12.1.2.tar";
sha256 = "1yibg2anpmyr2a27wm4xqjsvsi9km2jzb56bf7cwyj8dnjfsd11n";
url = "https://elpa.gnu.org/packages/auctex-12.2.0.tar";
sha256 = "0j919l3q5sq6h1k1kmk4kyv0vkzl4f98fxcd64v34x5q1ahjhg48";
};
packageRequires = [ cl-lib emacs ];
meta = {
@ -324,6 +324,21 @@
license = lib.licenses.free;
};
}) {};
bluetooth = callPackage ({ dash, elpaBuild, emacs, fetchurl, lib }:
elpaBuild {
pname = "bluetooth";
ename = "bluetooth";
version = "0.1.2";
src = fetchurl {
url = "https://elpa.gnu.org/packages/bluetooth-0.1.2.el";
sha256 = "1vp2vpyq0ybjni35ics1mg1kiwgvc7x12dlmvygy78sqp52sfkcv";
};
packageRequires = [ dash emacs ];
meta = {
homepage = "https://elpa.gnu.org/packages/bluetooth.html";
license = lib.licenses.free;
};
}) {};
bnf-mode = callPackage ({ cl-lib ? null, elpaBuild, emacs, fetchurl, lib }:
elpaBuild {
pname = "bnf-mode";
@ -542,10 +557,10 @@
elpaBuild {
pname = "company";
ename = "company";
version = "0.9.10";
version = "0.9.11";
src = fetchurl {
url = "https://elpa.gnu.org/packages/company-0.9.10.tar";
sha256 = "1w6a11rzsx5iyxn76xarzbghm460j0ampqxlh4j12bswvbn7swnd";
url = "https://elpa.gnu.org/packages/company-0.9.11.tar";
sha256 = "05fmkvsn5byaakl8i26nvnmiiz0766r17pni6x7bfrqby39ls233";
};
packageRequires = [ emacs ];
meta = {
@ -640,10 +655,10 @@
elpaBuild {
pname = "csv-mode";
ename = "csv-mode";
version = "1.9";
version = "1.10";
src = fetchurl {
url = "https://elpa.gnu.org/packages/csv-mode-1.9.el";
sha256 = "0sdnyi9in904k49yy5imapypnmk75lv14k9c1yyjhjpalvvh6br1";
url = "https://elpa.gnu.org/packages/csv-mode-1.10.el";
sha256 = "0q7j2cmj7vs6hz8cnf7j7lmlcjmig3jn2p6az345z96agl8a5xsq";
};
packageRequires = [ cl-lib emacs ];
meta = {
@ -711,21 +726,16 @@
license = lib.licenses.free;
};
}) {};
debbugs = callPackage ({ cl-lib ? null
, elpaBuild
, emacs
, fetchurl
, lib
, soap-client }:
debbugs = callPackage ({ elpaBuild, emacs, fetchurl, lib, soap-client }:
elpaBuild {
pname = "debbugs";
ename = "debbugs";
version = "0.20";
version = "0.21";
src = fetchurl {
url = "https://elpa.gnu.org/packages/debbugs-0.20.tar";
sha256 = "03mmb1zvbqlsznl5agq8k3xrlcz310vnsa2zn0y8myanm4ra51zm";
url = "https://elpa.gnu.org/packages/debbugs-0.21.tar";
sha256 = "1xx1wjfpsnwx2fpydqhwy9k1b5kjk8dqbkzf8lqaj9c4rvjbn50a";
};
packageRequires = [ cl-lib emacs soap-client ];
packageRequires = [ emacs soap-client ];
meta = {
homepage = "https://elpa.gnu.org/packages/debbugs.html";
license = lib.licenses.free;
@ -810,10 +820,10 @@
elpaBuild {
pname = "dired-git-info";
ename = "dired-git-info";
version = "0.2";
version = "0.3.1";
src = fetchurl {
url = "https://elpa.gnu.org/packages/dired-git-info-0.2.el";
sha256 = "1pxn6kmgnynil13gp3m0y8hhkvciy3dcd55psj70pkrj50555dqx";
url = "https://elpa.gnu.org/packages/dired-git-info-0.3.1.el";
sha256 = "1kd0rpw7l32wvwi7q8s0inx4bc66xrl7hkllnlicyczsnzw2z52z";
};
packageRequires = [ emacs ];
meta = {
@ -915,10 +925,10 @@
elpaBuild {
pname = "ebdb";
ename = "ebdb";
version = "0.6.11";
version = "0.6.12";
src = fetchurl {
url = "https://elpa.gnu.org/packages/ebdb-0.6.11.tar";
sha256 = "1ljcp4vy8z5xbcrlf33xgi63a2px4fhx6928qhwr7sy7jwil2s6n";
url = "https://elpa.gnu.org/packages/ebdb-0.6.12.tar";
sha256 = "1nmg2xxhfv53h13kb19xd4x5a7q9ln2rwzxc5v8zkf0g3vaxmhxw";
};
packageRequires = [ cl-lib emacs seq ];
meta = {
@ -975,10 +985,10 @@
elpaBuild {
pname = "eev";
ename = "eev";
version = "20190902";
version = "20191105";
src = fetchurl {
url = "https://elpa.gnu.org/packages/eev-20190902.tar";
sha256 = "09rk01ykvvmmzadkz8y064iil2sm6ql6qz0rj0jjlnyymi58lj69";
url = "https://elpa.gnu.org/packages/eev-20191105.tar";
sha256 = "0w53wq8nhl4c5czda5imcv7y97pgbpfmvcvpi6kdwl3jbn9cx396";
};
packageRequires = [ emacs ];
meta = {
@ -995,10 +1005,10 @@
elpaBuild {
pname = "eglot";
ename = "eglot";
version = "1.4";
version = "1.5";
src = fetchurl {
url = "https://elpa.gnu.org/packages/eglot-1.4.tar";
sha256 = "1f2p08ly17jcjyh8bif5zhmzhx3z5y9z20v6z24z939bqk8c1pdq";
url = "https://elpa.gnu.org/packages/eglot-1.5.tar";
sha256 = "00ifgz9r9xvy19zsz1yfls6n1acvms14p86nbw0x6ldjgvpf279i";
};
packageRequires = [ emacs flymake jsonrpc ];
meta = {
@ -1056,6 +1066,21 @@
license = lib.licenses.free;
};
}) {};
elisp-benchmarks = callPackage ({ elpaBuild, fetchurl, lib }:
elpaBuild {
pname = "elisp-benchmarks";
ename = "elisp-benchmarks";
version = "1.1";
src = fetchurl {
url = "https://elpa.gnu.org/packages/elisp-benchmarks-1.1.tar";
sha256 = "1blx3wsnz94va67437rmqa4i90gm9ig69bbmg9lp6jqyxrm9d7gc";
};
packageRequires = [];
meta = {
homepage = "https://elpa.gnu.org/packages/elisp-benchmarks.html";
license = lib.licenses.free;
};
}) {};
enwc = callPackage ({ elpaBuild, emacs, fetchurl, lib }:
elpaBuild {
pname = "enwc";
@ -1327,10 +1352,10 @@
elpaBuild {
pname = "gnorb";
ename = "gnorb";
version = "1.6.3";
version = "1.6.4";
src = fetchurl {
url = "https://elpa.gnu.org/packages/gnorb-1.6.3.tar";
sha256 = "14cdldlvq2fx0j9g1bbzb6dq7yp9rw6bv39sls67i2p35h3gc2gd";
url = "https://elpa.gnu.org/packages/gnorb-1.6.4.tar";
sha256 = "1zm4c48x9vjqyn1h60lphhnzzg005wcad5mzn0majk0h59qcr0ff";
};
packageRequires = [ cl-lib ];
meta = {
@ -1379,10 +1404,10 @@
elpaBuild {
pname = "gnus-mock";
ename = "gnus-mock";
version = "0.4.2";
version = "0.4.4";
src = fetchurl {
url = "https://elpa.gnu.org/packages/gnus-mock-0.4.2.tar";
sha256 = "04ddmdm2hykgr2y9g2yzmjp3fszhaywwnxhiy608cpvqdjwlwhn7";
url = "https://elpa.gnu.org/packages/gnus-mock-0.4.4.tar";
sha256 = "0v94z800f1y3ylbgbrw4nslqm7j2jr592g402nxgj9rlldazzxg0";
};
packageRequires = [];
meta = {
@ -1521,10 +1546,10 @@
elpaBuild {
pname = "hyperbole";
ename = "hyperbole";
version = "7.0.3";
version = "7.0.6";
src = fetchurl {
url = "https://elpa.gnu.org/packages/hyperbole-7.0.3.tar";
sha256 = "1mvplaxfjji00gg8rkhidfsdl8knwi6c0ai149zm4djsfaww3ikh";
url = "https://elpa.gnu.org/packages/hyperbole-7.0.6.tar";
sha256 = "08gi4v76s53nfmn3s0qcxc3zii0pspjfd6ry7jq1kgm3z34x8hab";
};
packageRequires = [ emacs ];
meta = {
@ -1566,10 +1591,10 @@
elpaBuild {
pname = "ivy";
ename = "ivy";
version = "0.12.0";
version = "0.13.0";
src = fetchurl {
url = "https://elpa.gnu.org/packages/ivy-0.12.0.tar";
sha256 = "14q9kh48iabrnhwcmhlvgk7sg4a0j5c3zjp0yzj1ijrz5zbdhxxz";
url = "https://elpa.gnu.org/packages/ivy-0.13.0.tar";
sha256 = "18r9vb9v7hvdkylchn436sgh7ji9avhry1whjip8zrn0c1bnqmk8";
};
packageRequires = [ emacs ];
meta = {
@ -1596,10 +1621,10 @@
elpaBuild {
pname = "javaimp";
ename = "javaimp";
version = "0.6.1";
version = "0.7.1";
src = fetchurl {
url = "https://elpa.gnu.org/packages/javaimp-0.6.1.tar";
sha256 = "1piaj3sgw6a05g9dhkbrn5n490w01z1rxa7axh3vg1xqbs6rv7lw";
url = "https://elpa.gnu.org/packages/javaimp-0.7.1.tar";
sha256 = "0i93akp9jhlpgbm454wkjhir8cbzhfjb97cxxlk8n4pgzbh481l3";
};
packageRequires = [];
meta = {
@ -1656,10 +1681,10 @@
elpaBuild {
pname = "jsonrpc";
ename = "jsonrpc";
version = "1.0.7";
version = "1.0.9";
src = fetchurl {
url = "https://elpa.gnu.org/packages/jsonrpc-1.0.7.el";
sha256 = "19z35gjphl4mlgpznfhlccgipnmbb3c1dvax48f4hw8qwksfcac1";
url = "https://elpa.gnu.org/packages/jsonrpc-1.0.9.el";
sha256 = "1ncsdv9pr2zsfa9mxm4n68fppnkpm410mh72r7h5f8yj17lz00ss";
};
packageRequires = [ emacs ];
meta = {
@ -1802,16 +1827,16 @@
license = lib.licenses.free;
};
}) {};
loccur = callPackage ({ cl-lib ? null, elpaBuild, fetchurl, lib }:
loccur = callPackage ({ elpaBuild, emacs, fetchurl, lib }:
elpaBuild {
pname = "loccur";
ename = "loccur";
version = "1.2.3";
version = "1.2.4";
src = fetchurl {
url = "https://elpa.gnu.org/packages/loccur-1.2.3.el";
sha256 = "09pxp03g4pg95cpqiadyv9dz6qrwd9igrkwrhm4s38cscmqm7dzq";
url = "https://elpa.gnu.org/packages/loccur-1.2.4.el";
sha256 = "00f1ifa4z5ay90bd2002fmj83d7xqzrcr9018q8crlypmpxkyh7j";
};
packageRequires = [ cl-lib ];
packageRequires = [ emacs ];
meta = {
homepage = "https://elpa.gnu.org/packages/loccur.html";
license = lib.licenses.free;
@ -2070,10 +2095,10 @@
elpaBuild {
pname = "nhexl-mode";
ename = "nhexl-mode";
version = "1.3";
version = "1.4";
src = fetchurl {
url = "https://elpa.gnu.org/packages/nhexl-mode-1.3.el";
sha256 = "1fcy4ybr12dvswmzaqkv4798snb1x1y7ldxwrsgjv5sx7bb5j60z";
url = "https://elpa.gnu.org/packages/nhexl-mode-1.4.el";
sha256 = "1c2q4w6hkvc7z4026hiqdcsm2scd6q35x7b0dk80h8qicsbi14c8";
};
packageRequires = [ cl-lib emacs ];
meta = {
@ -2160,10 +2185,10 @@
elpaBuild {
pname = "objed";
ename = "objed";
version = "0.8.1";
version = "0.8.3";
src = fetchurl {
url = "https://elpa.gnu.org/packages/objed-0.8.1.tar";
sha256 = "06jfvz7avh28p1ghyd1aik5lz8k9fnv1fcxh9npm1iyvh810y5zj";
url = "https://elpa.gnu.org/packages/objed-0.8.3.tar";
sha256 = "1s38d6bvggdk5p45ww1jb4gxifzgjwgw1m6ar920nlg0j4fgbcvr";
};
packageRequires = [ cl-lib emacs ];
meta = {
@ -2220,10 +2245,10 @@
elpaBuild {
pname = "org";
ename = "org";
version = "9.2.6";
version = "9.3.1";
src = fetchurl {
url = "https://elpa.gnu.org/packages/org-9.2.6.tar";
sha256 = "0ikd78k4yw4sm5x7l3dsbvfcmvnv5qna2mxirr560gvcnzhr0zg4";
url = "https://elpa.gnu.org/packages/org-9.3.1.tar";
sha256 = "1n79h6ihhsaxxbnl9hw511aav0215m3pa51sa5fh3ddknjfplian";
};
packageRequires = [];
meta = {
@ -2250,10 +2275,10 @@
elpaBuild {
pname = "orgalist";
ename = "orgalist";
version = "1.9";
version = "1.11";
src = fetchurl {
url = "https://elpa.gnu.org/packages/orgalist-1.9.el";
sha256 = "1rmmcyiiqkq54hn74nhzxzl4nvd902hv6gq341jwhrm7yiagffi6";
url = "https://elpa.gnu.org/packages/orgalist-1.11.el";
sha256 = "0zbqkk540rax32s8szp5zgz3a02zw88fc1dmjmyw6h3ls04m91kl";
};
packageRequires = [ emacs ];
meta = {
@ -2265,10 +2290,10 @@
elpaBuild {
pname = "osc";
ename = "osc";
version = "0.1";
version = "0.2";
src = fetchurl {
url = "https://elpa.gnu.org/packages/osc-0.1.el";
sha256 = "09nzbbzvxfrjm91wawbv6bg6fqlcx1qi0711qc73yfrbc8ndsnsb";
url = "https://elpa.gnu.org/packages/osc-0.2.el";
sha256 = "1b1ck9kb9mkyd7nlj4cqahsshar6h8mpvqss6n3dp4cl3r6dk1sw";
};
packageRequires = [];
meta = {
@ -2385,10 +2410,10 @@
elpaBuild {
pname = "phps-mode";
ename = "phps-mode";
version = "0.3.1";
version = "0.3.27";
src = fetchurl {
url = "https://elpa.gnu.org/packages/phps-mode-0.3.1.tar";
sha256 = "1h6s5k156mbbkaysb07vcb13k3izs91pwigzcfh6jvv3lak4azg5";
url = "https://elpa.gnu.org/packages/phps-mode-0.3.27.tar";
sha256 = "1p6s5dq0d1ysm993qax8mi2sv4956ggainfbhsn65gc2718kjd0h";
};
packageRequires = [ emacs ];
meta = {
@ -2505,10 +2530,10 @@
elpaBuild {
pname = "rainbow-mode";
ename = "rainbow-mode";
version = "1.0.1";
version = "1.0.3";
src = fetchurl {
url = "https://elpa.gnu.org/packages/rainbow-mode-1.0.1.el";
sha256 = "0cpga4ax635rfpj7y2vmh7ank0yw00dcy20gjg1mj74r97by8csf";
url = "https://elpa.gnu.org/packages/rainbow-mode-1.0.3.el";
sha256 = "0cpwqllhv3cb0gii22cj9i731rk3sbf2drm5m52w5yclm8sfr339";
};
packageRequires = [];
meta = {
@ -2725,10 +2750,10 @@
elpaBuild {
pname = "relint";
ename = "relint";
version = "1.10";
version = "1.11";
src = fetchurl {
url = "https://elpa.gnu.org/packages/relint-1.10.el";
sha256 = "1l0lh4pkksw7brmhhbaikwzs4zkgd2962ks1zy7m262dvkhxjfv8";
url = "https://elpa.gnu.org/packages/relint-1.11.tar";
sha256 = "0c7d35kp5k11fnyjrq9cg8i2r005gs57pmb3rvpf8ilwv0scn1m7";
};
packageRequires = [ xr ];
meta = {
@ -3046,10 +3071,10 @@
elpaBuild {
pname = "svg-clock";
ename = "svg-clock";
version = "1.0";
version = "1.1";
src = fetchurl {
url = "https://elpa.gnu.org/packages/svg-clock-1.0.el";
sha256 = "0j6zk7fsv72af12phqdw8axbn2y8y4rfgxiab1p3pxq3y7k47jid";
url = "https://elpa.gnu.org/packages/svg-clock-1.1.el";
sha256 = "12wf4dd3vgbq1v3363cil4wr2skx60xy546jc69ycyk0jq7plcq3";
};
packageRequires = [ emacs svg ];
meta = {
@ -3151,10 +3176,10 @@
elpaBuild {
pname = "tramp";
ename = "tramp";
version = "2.4.2.2";
version = "2.4.3";
src = fetchurl {
url = "https://elpa.gnu.org/packages/tramp-2.4.2.2.tar";
sha256 = "0bjfnxxyn8xgw10ybnjrza2gfwqifa3q7rh0bp6pidlhg45718p8";
url = "https://elpa.gnu.org/packages/tramp-2.4.3.tar";
sha256 = "0m1w4myvd2z293xcdyj7yc75b108f67f98i0jrizhdaj9nlw8j5f";
};
packageRequires = [ emacs ];
meta = {
@ -3211,10 +3236,10 @@
elpaBuild {
pname = "undo-tree";
ename = "undo-tree";
version = "0.6.5";
version = "0.7";
src = fetchurl {
url = "https://elpa.gnu.org/packages/undo-tree-0.6.5.el";
sha256 = "0bs97xyxwfkjvzax9llg0zsng0vyndnrxj5d2n5mmynaqcn89d37";
url = "https://elpa.gnu.org/packages/undo-tree-0.7.el";
sha256 = "0mc5spiqx20z8vh8b24dp9hqj27h5bm5wqk0ga7c6s6mp69r72h4";
};
packageRequires = [];
meta = {
@ -3241,10 +3266,10 @@
elpaBuild {
pname = "uniquify-files";
ename = "uniquify-files";
version = "1.0.1";
version = "1.0.2";
src = fetchurl {
url = "https://elpa.gnu.org/packages/uniquify-files-1.0.1.tar";
sha256 = "0c4lf25503z71wz9f0v6ag5lmqfxz94lmq65xvzvhmqvkxvsgpm5";
url = "https://elpa.gnu.org/packages/uniquify-files-1.0.2.tar";
sha256 = "1vib79wsz5k94b9z0wiwhbzsdm70y9dla6szw2bb75245cx3kr0h";
};
packageRequires = [ emacs ];
meta = {
@ -3327,10 +3352,10 @@
elpaBuild {
pname = "verilog-mode";
ename = "verilog-mode";
version = "2019.9.23.4801067";
version = "2019.12.17.268053413";
src = fetchurl {
url = "https://elpa.gnu.org/packages/verilog-mode-2019.9.23.4801067.el";
sha256 = "0jaba2cqiiknjqc7qz6047hpjd87hznl6b154ai1900i52kjbs21";
url = "https://elpa.gnu.org/packages/verilog-mode-2019.12.17.268053413.el";
sha256 = "0g42sd5s5xrck077qx0p2sldnr2f9qm8dsr9d0fiaks294sk8li8";
};
packageRequires = [];
meta = {
@ -3480,10 +3505,10 @@
elpaBuild {
pname = "websocket";
ename = "websocket";
version = "1.11.1";
version = "1.12";
src = fetchurl {
url = "https://elpa.gnu.org/packages/websocket-1.11.1.tar";
sha256 = "09s8qyi012djmm3vrj1qg1zqqy0h0cbcfzfkhybvqi4amy4jgliw";
url = "https://elpa.gnu.org/packages/websocket-1.12.tar";
sha256 = "0ap4z80c6pzpb69wrx0hsvwzignxmd2b9xy974by9gf5xm2wpa8w";
};
packageRequires = [ cl-lib ];
meta = {
@ -3506,6 +3531,21 @@
license = lib.licenses.free;
};
}) {};
windower = callPackage ({ elpaBuild, emacs, fetchurl, lib }:
elpaBuild {
pname = "windower";
ename = "windower";
version = "0.0.1";
src = fetchurl {
url = "https://elpa.gnu.org/packages/windower-0.0.1.el";
sha256 = "19xizbfbnzhhmhlqy20ir1a1y87bjwrq67bcawxy6nxpkwbizsv7";
};
packageRequires = [ emacs ];
meta = {
homepage = "https://elpa.gnu.org/packages/windower.html";
license = lib.licenses.free;
};
}) {};
windresize = callPackage ({ elpaBuild, fetchurl, lib }:
elpaBuild {
pname = "windresize";
@ -3635,10 +3675,10 @@
elpaBuild {
pname = "yasnippet";
ename = "yasnippet";
version = "0.13.0";
version = "0.14.0";
src = fetchurl {
url = "https://elpa.gnu.org/packages/yasnippet-0.13.0.tar";
sha256 = "1klnvyy8az3h7i7yrpbhad4ka9p9vlgf918ha1vf7i3wbcbm0s5z";
url = "https://elpa.gnu.org/packages/yasnippet-0.14.0.tar";
sha256 = "1lbil3dyz43nmr2lvx9vhpybqynpb7shg7m1xl1f7j4vm4dh0r08";
};
packageRequires = [ cl-lib ];
meta = {

View File

@ -2,16 +2,16 @@
buildGoModule rec {
pname = "glow";
version = "0.1.6";
version = "0.2.0";
src = fetchFromGitHub {
owner = "charmbracelet";
repo = "glow";
rev = "v${version}";
sha256 = "0q35napi1aa6dfrqz26hvhzijymb9sxsf3mrrn1mh7ssgkhvmqqc";
sha256 = "0vhl8d7xxqqyl916nh8sgm1xdaf7xlc3r18464bd2av22q9yz68n";
};
modSha256 = "07imn9p0s79x1h45dk05hjcm6946d84j6k5pnljqrz4zk64hy26c";
modSha256 = "0r0yq7kgz7i1wf4gxxihdrn1c8mi4wcyhadncxbln24s9c5apxsf";
buildFlagsArray = [ "-ldflags=" "-X=main.Version=${version}" ];

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "kdev-php";
version = "5.4.5";
version = "5.4.6";
src = fetchurl {
url = "https://github.com/KDE/${pname}/archive/v${version}.tar.gz";
sha256 = "12j0l2k6ii9ajp90lil3apk0xsz56cb549ighabik73a1w3c6ib6";
sha256 = "0p532r0ld6j6fpwqyf9m5m0d27y37chgbvcjp1x6g5jjvm7m77xk";
};
nativeBuildInputs = [ cmake extra-cmake-modules ];

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "kdev-python";
version = "5.4.5";
version = "5.4.6";
src = fetchurl {
url = "https://github.com/KDE/${pname}/archive/v${version}.tar.gz";
sha256 = "1iq4lxbl8gq4qvydyz34ild4izw21cp22adlz9dc054v0wis331j";
sha256 = "1xzk0zgbc4nnz8gjbhw5h6kwznzxsqrg19ggyb8ijpmgg0ncxk8l";
};
cmakeFlags = [

View File

@ -10,11 +10,11 @@
mkDerivation rec {
pname = "kdevelop";
version = "5.4.5";
version = "5.4.6";
src = fetchurl {
url = "mirror://kde/stable/${pname}/${version}/src/${pname}-${version}.tar.xz";
sha256 = "08vhbg9ql0402bw3y3xw1kdxhig9sv3ss8g0h4477vy3z17m1h4j";
sha256 = "01jmrmwbc1hrvq7jdfcc7mxl03q2l6kz57yca2j26xwyvfcfv5sz";
};
nativeBuildInputs = [

View File

@ -4,14 +4,14 @@ with stdenv.lib;
pythonPackages.buildPythonApplication rec {
pname = "neovim-remote";
version = "2.2.3";
version = "2.4.0";
disabled = !pythonPackages.isPy3k;
src = fetchFromGitHub {
owner = "mhinz";
repo = "neovim-remote";
rev = "v${version}";
sha256 = "0g7gs5gigk3krydxdpmscgfr1fms0a6rc6am2y4c5szkgbd1d0ph";
sha256 = "0jlw0qksak4bdzddpsj74pm2f2bgpj3cwrlspdjjy0j9qzg0mpl9";
};
propagatedBuildInputs = with pythonPackages; [ pynvim psutil ];

View File

@ -101,7 +101,7 @@ let
# Only display the log on error since it will contain a few normally
# irrelevant messages.
if ! $out/bin/nvim \
-u ${vimUtils.vimrcFile (configure // { customRC = ""; beforePlugins = ''filetype indent plugin on | syn on''; })} \
-u ${vimUtils.vimrcFile (configure // { customRC = ""; })} \
-i NONE -n \
-E -V1rplugins.log -s \
+UpdateRemotePlugins +quit! > outfile 2>&1; then

View File

@ -4,13 +4,13 @@
stdenv.mkDerivation rec {
pname = "quilter";
version = "2.0.4";
version = "2.0.5";
src = fetchFromGitHub {
owner = "lainsce";
repo = pname;
rev = version;
sha256 = "1vbckx4c2bklzxpqz332sjcyrx1xl1j0cp8778fvpl7x7mlnz460";
sha256 = "1gij5gqidzvwym7yq5fx0344n4fkydx5diwz8g8kwkcsz7z1w45a";
};
nativeBuildInputs = [

View File

@ -7,13 +7,13 @@
stdenv.mkDerivation rec {
pname = "azpainter";
version = "2.1.4";
version = "2.1.5";
src = fetchFromGitHub {
owner = "Symbian9";
repo = pname;
rev = "refs/tags/v${version}";
sha256 = "1hrr9lhsbjyzar3nxvli6cazr7zhyzh0p8hwpg4g9ga6njs8vi8m";
sha256 = "0x5jmsprjissqcvwq75pqq9wgv4k9b7cy507hai8xk6xs3vxwgba";
};
nativeBuildInputs = [ autoreconfHook ];

View File

@ -10,11 +10,11 @@ with stdenv.lib;
perlPackages.buildPerlPackage rec {
pname = "gscan2pdf";
version = "2.6.2";
version = "2.6.3";
src = fetchurl {
url = "mirror://sourceforge/gscan2pdf/${version}/${pname}-${version}.tar.xz";
sha256 = "0z35lglf4anfczizynjp8sd1jpix5mkmm1nh39n1v94l7ahjxsww";
sha256 = "1chmk51xwylnjrgc6hw23x7g7cpwzgwmjc49fcah7pkd3dk1cvvr";
};
nativeBuildInputs = [ wrapGAppsHook ];

View File

@ -2,18 +2,18 @@
, freeimage, fontconfig, pkgconfig
, asciidoc, docbook_xsl, libxslt, cmocka
, librsvg, pango, libxkbcommon, wayland
, libGLU
, libGLU, icu
}:
stdenv.mkDerivation rec {
pname = "imv";
version = "4.0.1";
version = "4.1.0";
src = fetchFromGitHub {
owner = "eXeC64";
repo = "imv";
rev = "v${version}";
sha256 = "sha256:01fbkbwwsyr00k3mwans8jfb9p4gl02v6z62vgx0pkgrzxjkcz07";
sha256 = "sha256:0gk8g178i961nn3bls75a8qpv6wvfvav6hd9lxca1skaikd33zdx";
};
preBuild = ''
@ -36,6 +36,7 @@ stdenv.mkDerivation rec {
pango
pkgconfig
wayland
icu
];
installFlags = [ "PREFIX=$(out)" "CONFIGPREFIX=$(out)/etc" ];

View File

@ -9,11 +9,23 @@
stdenv.mkDerivation rec {
pname = "kodelife";
version = "0.8.7.105";
version = "0.8.8.110";
suffix = {
aarch64-linux = "linux-arm64";
armv7l-linux = "linux-armhf";
x86_64-darwin = "macos";
x86_64-linux = "linux-x86_64";
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");
src = fetchzip {
url = "https://hexler.net/pub/${pname}/${pname}-${version}-linux-x86_64.zip";
sha256 = "0ld4lwigzwlikx04qy3gskqqg0wzlk8m3ccrd704ifl8lsp46n5r";
url = "https://hexler.net/pub/${pname}/${pname}-${version}-${suffix}.zip";
sha256 = {
aarch64-linux = "1lcpj1mgkvksq1d08ibh59y0dmdh7zm77wi5ziqhg3p5g9nxyasd";
armv7l-linux = "0sljy06302x567jqw5lagbyhpc3j140jk4wccacxjrbb6hcx3l42";
x86_64-darwin = "1b058s9kny026q395nj99v8hggxkgv43nnjkmx1a2siajw0db94c";
x86_64-linux = "1q77cpz4gflrvfz6dm6np8sqbwyr235gq7y4pzs4hnqbrdzd4nwl";
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");
};
dontConfigure = true;
@ -36,7 +48,7 @@ stdenv.mkDerivation rec {
libGLU libGL
xorg.libX11
];
in ''
in stdenv.lib.optionalString (!stdenv.isDarwin) ''
patchelf \
--set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
--set-rpath "${libPath}" \
@ -48,6 +60,6 @@ stdenv.mkDerivation rec {
description = "Real-time GPU shader editor";
license = licenses.unfree;
maintainers = with maintainers; [ prusnak ];
platforms = [ "x86_64-linux" ];
platforms = [ "aarch64-linux" "armv7l-linux" "x86_64-darwin" "x86_64-linux" ];
};
}
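
The update above also makes the package multi-platform by indexing an attribute set with the platform string and falling back to a throw. The pattern in isolation, with placeholder attribute values:

# Standalone sketch; the names and values are placeholders.
{ stdenv }:
{
  x86_64-linux  = "linux-x86_64";
  x86_64-darwin = "macos";
}.${stdenv.hostPlatform.system}
  or (throw "Unsupported system: ${stdenv.hostPlatform.system}")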

View File

@ -1,9 +1,9 @@
{ stdenv, fetchurl, cmake, exiv2, graphicsmagick, libraw, fetchpatch
{ mkDerivation, stdenv, fetchurl, cmake, exiv2, graphicsmagick, libraw, fetchpatch
, qtbase, qtdeclarative, qtmultimedia, qtquickcontrols, qttools, qtgraphicaleffects
, extra-cmake-modules, poppler, kimageformats, libarchive, libdevil
}:
stdenv.mkDerivation rec {
mkDerivation rec {
pname = "photoqt";
version = "1.7.1";

View File

@ -5,6 +5,7 @@
cmake,
gzip,
installShellFiles,
makeWrapper,
ncurses,
pkgconfig,
@ -52,20 +53,21 @@ let
];
in buildRustPackage rec {
pname = "alacritty";
version = "0.4.0";
version = "0.4.1";
src = fetchFromGitHub {
owner = "jwilm";
repo = pname;
rev = "v${version}";
sha256 = "0adaqdbma6gskb2g14yscrgr9gch5wf2g2clchplv72c2qr1k427";
sha256 = "05jcg33ifngpzw2hdhgb614j87ihhhlqgar0kky183rywg0dxikg";
};
cargoSha256 = "1r267g8f986nxh8ms5yhp50qy1yl8gly2jr78p738qqc6frlxlhv";
cargoSha256 = "1kc9n10kb4j87x337pzl6wpi0qj5ib2mqmrjag2yld3138dag71n";
nativeBuildInputs = [
cmake
gzip
installShellFiles
makeWrapper
ncurses
pkgconfig
@ -93,9 +95,9 @@ in buildRustPackage rec {
patchelf --set-rpath "${stdenv.lib.makeLibraryPath rpathLibs}" $out/bin/alacritty
'') + ''
install -D extra/completions/_alacritty -t "$out/share/zsh/site-functions/"
install -D extra/completions/alacritty.bash -t "$out/etc/bash_completion.d/"
install -D extra/completions/alacritty.fish -t "$out/share/fish/vendor_completions.d/"
installShellCompletion --zsh extra/completions/_alacritty
installShellCompletion --bash extra/completions/alacritty.bash
installShellCompletion --fish extra/completions/alacritty.fish
install -dm 755 "$out/share/man/man1"
gzip -c extra/alacritty.man > "$out/share/man/man1/alacritty.1.gz"
@ -112,9 +114,9 @@ in buildRustPackage rec {
meta = with stdenv.lib; {
description = "GPU-accelerated terminal emulator";
homepage = https://github.com/jwilm/alacritty;
license = with licenses; [ asl20 ];
homepage = "https://github.com/jwilm/alacritty";
license = licenses.asl20;
maintainers = with maintainers; [ filalex77 mic92 ];
platforms = [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ];
platforms = platforms.unix;
};
}
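
The completion files are now installed through the installShellFiles hook instead of hand-written install -D calls. A minimal sketch of that hook in another, hypothetical derivation:

# Hypothetical package; only the completion paths differ per project.
{ stdenv, installShellFiles }:
stdenv.mkDerivation {
  pname = "example";
  version = "1.0";
  src = ./.;
  nativeBuildInputs = [ installShellFiles ];
  postInstall = ''
    installShellCompletion --zsh  completions/_example
    installShellCompletion --bash completions/example.bash
    installShellCompletion --fish completions/example.fish
  '';
}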

View File

@ -2,11 +2,11 @@
pythonPackages.buildPythonApplication rec {
pname = "cherrytree";
version = "0.38.9";
version = "0.38.10";
src = fetchurl {
url = "https://www.giuspen.com/software/${pname}-${version}.tar.xz";
sha256 = "0xal09ijgxbzvp003s40xbrfnpq3ald1fw8nnpqq3yg7h6g6c5pw";
sha256 = "1bj83b7lwqir13fp9slcdn8mgign06vywy42x8zvsp22fjn4p7f7";
};
nativeBuildInputs = [ gettext ];

View File

@ -7,7 +7,7 @@
stdenv.mkDerivation rec {
pname = "dbeaver-ce";
version = "6.3.1";
version = "6.3.2";
desktopItem = makeDesktopItem {
name = "dbeaver";
@ -30,7 +30,7 @@ stdenv.mkDerivation rec {
src = fetchurl {
url = "https://dbeaver.io/files/${version}/dbeaver-ce-${version}-linux.gtk.x86_64.tar.gz";
sha256 = "0w7nsxm66gbwwql1zk7zh6zvvsjcn78qm0miv0w8nj1qk24zr494";
sha256 = "0yr79p4vdg6s6c8sry6qnf2ifjcjdapn0sff2crsnz331rsh27sm";
};
installPhase = ''

View File

@ -2,11 +2,11 @@
python3Packages.buildPythonApplication rec {
pname = "gallery_dl";
version = "1.12.1";
version = "1.12.2";
src = python3Packages.fetchPypi {
inherit pname version;
sha256 = "0y1bzqcacik030zjchgmzrwpfb49mi68wdivli9ydb64w1zql9im";
sha256 = "013bavyqvnay38c844n1jvirsmj807f0wg2qlclkdghkj316p1pz";
};
doCheck = false;

View File

@ -2,13 +2,13 @@
mkDerivation rec {
pname = "heimer";
version = "1.12.0";
version = "1.13.1";
src = fetchFromGitHub {
owner = "juzzlin";
repo = pname;
rev = version;
sha256 = "1gw4w6cvr3vb4zdb1kq8gwmadh2lb0jd0bd2hc7cw2d5kdbjaln7";
sha256 = "1s6s5rlzr917hq7370pmikbdvd6y468cyxw614ah65d4v105qfv7";
};
nativeBuildInputs = [ cmake ];

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "josm";
version = "15553";
version = "15628";
src = fetchurl {
url = "https://josm.openstreetmap.de/download/josm-snapshot-${version}.jar";
sha256 = "07kkc19r9xkb5jim26nnmajp0jzvg3absgx55z5qnna6r189ba2j";
sha256 = "19yn311h9x1434832i0awpv2n9vhbhkk1j1mi9zggy9f256f80c6";
};
buildInputs = [ jdk11 makeWrapper ];

View File

@ -1,4 +1,4 @@
{ stdenv, fetchFromGitHub, cmake, makeWrapper, qttools
{ stdenv, fetchFromGitHub, cmake, makeWrapper, qttools, darwin
, curl
, glibcLocales
@ -7,7 +7,6 @@
, libargon2
, libgcrypt
, libgpgerror
, libmicrohttpd
, libsodium
, libyubikey
, pkg-config
@ -93,7 +92,6 @@ stdenv.mkDerivation rec {
libargon2
libgcrypt
libgpgerror
libmicrohttpd
libsodium
libyubikey
pkg-config
@ -105,7 +103,8 @@ stdenv.mkDerivation rec {
zlib
]
++ stdenv.lib.optional withKeePassKeeShareSecure quazip
++ stdenv.lib.optional stdenv.isDarwin qtmacextras;
++ stdenv.lib.optional stdenv.isDarwin qtmacextras
++ stdenv.lib.optional (stdenv.isDarwin && withKeePassTouchID) darwin.apple_sdk.frameworks.LocalAuthentication;
preFixup = optionalString stdenv.isDarwin ''
# Make it work without Qt in PATH.
@ -118,6 +117,6 @@ stdenv.mkDerivation rec {
homepage = https://keepassxc.org/;
license = licenses.gpl2;
maintainers = with maintainers; [ jonafato ];
platforms = with platforms; linux ++ darwin;
platforms = platforms.linux ++ platforms.darwin;
};
}
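
The Darwin-only Touch ID dependency is appended with lib.optional, which yields a one-element list when the condition holds and an empty list otherwise. A sketch with placeholder inputs:

# Hypothetical buildInputs list; names are placeholders.
{ lib, stdenv, zlib, qtmacextras, darwin, withTouchID ? true }:
[ zlib ]
++ lib.optional stdenv.isDarwin qtmacextras
++ lib.optional (stdenv.isDarwin && withTouchID)
     darwin.apple_sdk.frameworks.LocalAuthentication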

View File

@ -3,11 +3,11 @@
mkDerivation rec {
pname = "latte-dock";
version = "0.9.5";
version = "0.9.7";
src = fetchurl {
url = "https://download.kde.org/stable/${pname}/${pname}-${version}.tar.xz";
sha256 = "1g8a2lmg9agcs2kwbwh6sj9hkrbzad7bkxk39nx5536svnifbg1c";
sha256 = "1b8yz6r6x46xajx900m8s0sjfwiwbpp6nfb780ygfcz6inb1234q";
name = "${pname}-${version}.tar.xz";
};

View File

@ -1,8 +1,23 @@
{ mkDerivation, lib, fetchFromGitHub, installShellFiles, pkgconfig, runtimeShell
, qtbase, qtsvg, qttools, qmake
, boost, muparser }:
{ boost
, fetchFromGitHub
, installShellFiles
, mkDerivationWith
, muparser
, pkgconfig
, qmake
, qtbase
, qtsvg
, qttools
, runtimeShell
, gcc8Stdenv
}:
mkDerivation rec {
let
stdenv = gcc8Stdenv;
in
# Doesn't build with gcc9
mkDerivationWith stdenv.mkDerivation rec {
pname = "librecad";
version = "2.2.0-rc1";
@ -13,7 +28,9 @@ mkDerivation rec {
sha256 = "0kwj838hqzbw95gl4x6scli9gj3gs72hdmrrkzwq5rjxam18k3f3";
};
patches = [ ./fix_qt_5_11_build.patch ];
patches = [
./fix_qt_5_11_build.patch
];
postPatch = ''
substituteInPlace scripts/postprocess-unix.sh \
@ -48,17 +65,30 @@ mkDerivation rec {
runHook postInstall
'';
buildInputs = [ boost muparser qtbase qtsvg ];
buildInputs = [
boost
muparser
qtbase
qtsvg
];
nativeBuildInputs = [ installShellFiles pkgconfig qmake qttools ];
nativeBuildInputs = [
installShellFiles
pkgconfig
qmake
qttools
];
enableParallelBuilding = true;
meta = with lib; {
meta = with stdenv.lib; {
description = "2D CAD package based on Qt";
homepage = "https://librecad.org";
license = licenses.gpl2;
maintainers = with maintainers; [ viric ];
maintainers = with maintainers; [
kiwi
viric
];
platforms = platforms.linux;
};
}
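
Because the package does not build with gcc9, the builder is taken from gcc8Stdenv while mkDerivationWith still applies the Qt wrapping hooks. The pattern in isolation, with placeholder package data:

# Hypothetical Qt package built with the gcc8 stdenv.
{ gcc8Stdenv, mkDerivationWith, qmake, qtbase }:
mkDerivationWith gcc8Stdenv.mkDerivation {
  pname = "example-qt-app";
  version = "1.0";
  src = ./.;
  nativeBuildInputs = [ qmake ];
  buildInputs = [ qtbase ];
}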

View File

@ -3,14 +3,14 @@
}:
stdenv.mkDerivation rec {
name = "xterm-349";
name = "xterm-351";
src = fetchurl {
urls = [
"ftp://ftp.invisible-island.net/xterm/${name}.tgz"
"https://invisible-mirror.net/archives/xterm/${name}.tgz"
];
sha256 = "0ps7b2b2kbrkv5q49cmb8c51z0w21jmm7hwciw30m6jgfb9s79ir";
sha256 = "05kf586my4irrzz2bxgmwjdvynyrg9ybhvfqmx29g70w4888l2kn";
};
buildInputs =

View File

@ -110,12 +110,16 @@ let
url = "https://raw.githubusercontent.com/archlinuxarm/PKGBUILDs/09c7fa0dc1d87922e3b464c0fa084df1227fca79/extra/firefox/build-arm-libopus.patch";
sha256 = "1zg56v3lc346fkzcjjx21vjip2s9hb2xw4pvza1dsfdnhsnzppfp";
})
] ++ lib.optional (lib.versionAtLeast ffversion "71") ./fix-ff71-lto.patch
]
++ lib.optional (lib.versionAtLeast ffversion "71") (fetchpatch {
url = "https://phabricator.services.mozilla.com/D56873?download=true";
sha256 = "183949phd2n27nhiq85a04j4fjn0jxmldic6wcjrczsd8g2rrr5k";
})
++ patches;
in
stdenv.mkDerivation rec {
stdenv.mkDerivation (rec {
name = "${pname}-unwrapped-${version}";
version = browserVersion;
@ -363,4 +367,18 @@ stdenv.mkDerivation rec {
inherit browserName;
} // lib.optionalAttrs gtk3Support { inherit gtk3; };
}
} //
# the build system verifies checksums of the bundled rust sources
# ./third_party/rust is patched by our libtool fixup code in stdenv;
# unfortunately we can't just set this to `false` when we do not want it.
# See https://github.com/NixOS/nixpkgs/issues/77289 for more details
lib.optionalAttrs (lib.versionAtLeast ffversion "72") {
# Ideally we would figure out how to tell the build system to not
# care about changed hashes, as we already do that when we
# fetch the sources. Any further modification of the source tree
# is made on purpose by one of our tools (or by accident, which would be a bug).
dontFixLibtool = true;
# on aarch64 this is also required
dontUpdateAutotoolsGnuConfigScripts = true;
})
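
The derivation arguments are now merged with lib.optionalAttrs, which evaluates to an empty set when the condition is false, so the libtool workarounds are only present for Firefox 72 and later. The merge pattern in isolation, with placeholder attributes:

# Hypothetical derivation; only the // lib.optionalAttrs shape matters here.
{ lib, stdenv, version ? "72.0", src ? ./. }:
stdenv.mkDerivation ({
  pname = "example";
  inherit version src;
} // lib.optionalAttrs (lib.versionAtLeast version "72") {
  dontFixLibtool = true;
  dontUpdateAutotoolsGnuConfigScripts = true;
})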

View File

@ -1,100 +0,0 @@
Original Patch: https://bugzilla.mozilla.org/show_bug.cgi?id=1601707#c6
Also fixes the issues with dom localstorage.
--- a/dom/indexedDB/ActorsParent.cpp
+++ b/dom/indexedDB/ActorsParent.cpp
@@ -24311,11 +24311,11 @@ nsresult ObjectStoreAddOrPutRequestOp::DoDatabaseWork(
// if we allow overwrite or not. By not allowing overwrite we raise
// detectable errors rather than corrupting data.
DatabaseConnection::CachedStatement stmt;
- const auto& optReplaceDirective = (!mOverwrite || keyUnset)
- ? NS_LITERAL_CSTRING("")
- : NS_LITERAL_CSTRING("OR REPLACE ");
rv = aConnection->GetCachedStatement(
- NS_LITERAL_CSTRING("INSERT ") + optReplaceDirective +
+ NS_LITERAL_CSTRING("INSERT ") +
+ ((!mOverwrite || keyUnset)
+ ? NS_LITERAL_CSTRING("")
+ : NS_LITERAL_CSTRING("OR REPLACE ")) +
NS_LITERAL_CSTRING("INTO object_data "
"(object_store_id, key, file_ids, data) "
"VALUES (:") +
@@ -26076,9 +26076,6 @@ nsresult Cursor::OpenOp::DoIndexDatabaseWork(DatabaseConnection* aConnection) {
const bool usingKeyRange = mOptionalKeyRange.isSome();
- const auto& indexTable = mCursor->mUniqueIndex
- ? NS_LITERAL_CSTRING("unique_index_data")
- : NS_LITERAL_CSTRING("index_data");
NS_NAMED_LITERAL_CSTRING(sortColumn, "sort_column");
@@ -26099,7 +26096,9 @@ nsresult Cursor::OpenOp::DoIndexDatabaseWork(DatabaseConnection* aConnection) {
"object_data.file_ids, "
"object_data.data "
"FROM ") +
- indexTable +
+ (mCursor->mUniqueIndex
+ ? NS_LITERAL_CSTRING("unique_index_data")
+ : NS_LITERAL_CSTRING("index_data")) +
NS_LITERAL_CSTRING(
" AS index_table "
"JOIN object_data "
@@ -26198,9 +26197,6 @@ nsresult Cursor::OpenOp::DoIndexKeyDatabaseWork(
const bool usingKeyRange = mOptionalKeyRange.isSome();
- const auto& table = mCursor->mUniqueIndex
- ? NS_LITERAL_CSTRING("unique_index_data")
- : NS_LITERAL_CSTRING("index_data");
NS_NAMED_LITERAL_CSTRING(sortColumn, "sort_column");
@@ -26218,7 +26214,10 @@ nsresult Cursor::OpenOp::DoIndexKeyDatabaseWork(
NS_LITERAL_CSTRING(
"object_data_key "
" FROM ") +
- table + NS_LITERAL_CSTRING(" WHERE index_id = :") +
+ (mCursor->mUniqueIndex
+ ? NS_LITERAL_CSTRING("unique_index_data")
+ : NS_LITERAL_CSTRING("index_data")) +
+ NS_LITERAL_CSTRING(" WHERE index_id = :") +
kStmtParamNameId;
const auto keyRangeClause =
diff --git a/dom/localstorage/ActorsParent.cpp b/dom/localstorage/ActorsParent.cpp
index 9c46c20670..642cef1701 100644
--- a/dom/localstorage/ActorsParent.cpp
+++ b/dom/localstorage/ActorsParent.cpp
@@ -6959,13 +6959,10 @@ nsresult PrepareDatastoreOp::Start() {
MOZ_ASSERT(!QuotaClient::IsShuttingDownOnBackgroundThread());
MOZ_ASSERT(MayProceed());
- const LSRequestCommonParams& commonParams =
- mForPreload
- ? mParams.get_LSRequestPreloadDatastoreParams().commonParams()
- : mParams.get_LSRequestPrepareDatastoreParams().commonParams();
-
const PrincipalInfo& storagePrincipalInfo =
- commonParams.storagePrincipalInfo();
+ mForPreload
+ ? mParams.get_LSRequestPreloadDatastoreParams().commonParams().storagePrincipalInfo()
+ : mParams.get_LSRequestPrepareDatastoreParams().commonParams().storagePrincipalInfo();
if (storagePrincipalInfo.type() == PrincipalInfo::TSystemPrincipalInfo) {
QuotaManager::GetInfoForChrome(&mSuffix, &mGroup, &mOrigin);
@@ -6996,10 +6993,9 @@ nsresult PrepareDatastoreOp::CheckExistingOperations() {
return NS_ERROR_FAILURE;
}
- const LSRequestCommonParams& commonParams =
- mForPreload
- ? mParams.get_LSRequestPreloadDatastoreParams().commonParams()
- : mParams.get_LSRequestPrepareDatastoreParams().commonParams();
+ const LSRequestCommonParams& preloadCommonParams = mParams.get_LSRequestPreloadDatastoreParams().commonParams();
+ const LSRequestCommonParams& prepareCommonParams = mParams.get_LSRequestPrepareDatastoreParams().commonParams();
+ const LSRequestCommonParams& commonParams = mForPreload ? preloadCommonParams : prepareCommonParams;
const PrincipalInfo& storagePrincipalInfo =
commonParams.storagePrincipalInfo();

View File

@ -16,10 +16,10 @@ in
rec {
firefox = common rec {
pname = "firefox";
ffversion = "71.0";
ffversion = "72.0.1";
src = fetchurl {
url = "mirror://mozilla/firefox/releases/${ffversion}/source/firefox-${ffversion}.source.tar.xz";
sha512 = "0hfjlhwdhfdfzd27d6p3h8ff5m2jphlaipv4zym48bn6g95if1x98q2lb87617bxfm31di4rckjvqb70g9sm3smil6p6bnw2dsvnq1g";
sha512 = "37ryimi6yfpcha4c9mcv8gjk38kia1lr5xrj2lglwsr1jai7qxrcd8ljcry8bg87qfwwb9fa13prmn78f5pzpxr7jf8gnsbvr6adxld";
};
patches = [
@ -89,6 +89,7 @@ rec {
meta = firefox.meta // {
description = "A web browser built from Firefox Extended Support Release source tree";
knownVulnerabilities = [ "Support ended around October 2019." ];
};
updateScript = callPackage ./update.nix {
attrPath = "firefox-esr-60-unwrapped";
@ -99,10 +100,10 @@ rec {
firefox-esr-68 = common rec {
pname = "firefox-esr";
ffversion = "68.3.0esr";
ffversion = "68.4.1esr";
src = fetchurl {
url = "mirror://mozilla/firefox/releases/${ffversion}/source/firefox-${ffversion}.source.tar.xz";
sha512 = "31zisy4l07hhm9yvxz7sx04kz1f5rl20z1w072jxaabi42sw07xr6lcflv88gwl21y902n7vwd1q1zfavpnipn65wap4i0vm8c4m6pr";
sha512 = "3nqchvyr95c9xvz23z0kcqqyx8lskw0lxa3rahiagc7b71pnrk8l40c7327q1wd4y5g16lix0fg04xiy6lqjfycjsrjlfr2y6b51n4d";
};
patches = [
@ -174,6 +175,7 @@ in {
./no-buildconfig.patch
missing-documentation-patch
];
meta.knownVulnerabilities = [ "Support ended around October 2019." ];
};
# Similarly to firefox-esr-52 above.
@ -195,94 +197,8 @@ in {
meta.knownVulnerabilities = [ "Support ended in August 2018." ];
};
}) // (let
tbcommon = args: common (args // {
pname = "tor-browser";
isTorBrowserLike = true;
unpackPhase = ''
# fetchFromGitHub produces ro sources, root dir gets a name that
# is too long for shebangs. fixing
cp -a $src tor-browser
chmod -R +w tor-browser
cd tor-browser
# set times for xpi archives
find . -exec touch -d'2010-01-01 00:00' {} \;
'';
meta = (args.meta or {}) // {
description = "A web browser built from TorBrowser source tree";
longDescription = ''
This is a version of TorBrowser with bundle-related patches
reverted.
I.e. it's a variant of Firefox with less fingerprinting and
some isolation features you can't get with any extensions.
Or, alternatively, a variant of TorBrowser that works like any
other UNIX program and doesn't expect you to run it from a
bundle.
It will use your default Firefox profile if you're not careful
even! Be careful!
It will clash with firefox binary if you install both. But it
should not be a problem because you should run browsers in
separate users/VMs anyway.
Create new profile by starting it as
$ firefox -ProfileManager
and then configure it to use your tor instance.
Or just use `tor-browser-bundle` package that packs this
`tor-browser` back into a sanely-built bundle.
'';
homepage = "https://www.torproject.org/projects/torbrowser.html";
platforms = lib.platforms.unix;
license = with lib.licenses; [ mpl20 bsd3 ];
};
});
in rec {
tor-browser-7-5 = (tbcommon {
ffversion = "52.9.0esr";
tbversion = "7.5.6";
# FIXME: fetchFromGitHub is not ideal, unpacked source is >900Mb
src = fetchFromGitHub {
owner = "SLNOS";
repo = "tor-browser";
# branch "tor-browser-52.9.0esr-7.5-2-slnos"
rev = "95bb92d552876a1f4260edf68fda5faa3eb36ad8";
sha256 = "1ykn3yg4s36g2cpzxbz7s995c33ij8kgyvghx38z4i8siaqxdddy";
};
}).override {
gtk3Support = false;
};
tor-browser-8-5 = tbcommon rec {
ffversion = "60.9.0esr";
tbversion = "8.5.6";
# FIXME: fetchFromGitHub is not ideal, unpacked source is >900Mb
src = fetchFromGitHub {
owner = "SLNOS";
repo = "tor-browser";
# branch "tor-browser-60.9.0esr-8.5-2-slnos"
rev = "0489ae3158cd8c0e16c2e78b94083d8cbf0209dc";
sha256 = "0y5s7d8pg8ak990dp8d801j9823igaibfhv9hsa79nib5yllifzs";
};
patches = [
missing-documentation-patch
];
};
tor-browser = tor-browser-8-5;
tor-browser-7-5 = throw "firefoxPackages.tor-browser-7-5 was removed because it was out of date and inadequately maintained. Please use tor-browser-bundle-bin instead. See #77452.";
tor-browser-8-5 = throw "firefoxPackages.tor-browser-8-5 was removed because it was out of date and inadequately maintained. Please use tor-browser-bundle-bin instead. See #77452.";
tor-browser = throw "firefoxPackages.tor-browser was removed because it was out of date and inadequately maintained. Please use tor-browser-bundle-bin instead. See #77452.";
})

View File

@ -48,21 +48,21 @@ in
stdenv.mkDerivation rec {
pname = "google-talk-plugin";
# You can get the upstream version and SHA-1 hash from the following URLs:
# curl -s http://dl.google.com/linux/talkplugin/deb/dists/stable/main/binary-amd64/Packages | grep -E 'Version|SHA1'
# curl -s http://dl.google.com/linux/talkplugin/deb/dists/stable/main/binary-i386/Packages | grep -E 'Version|SHA1'
# You can get the upstream version and SHA-256 hash from the following URLs:
# curl -s http://dl.google.com/linux/talkplugin/deb/dists/stable/main/binary-amd64/Packages | grep -E 'Version|SHA256'
# curl -s http://dl.google.com/linux/talkplugin/deb/dists/stable/main/binary-i386/Packages | grep -E 'Version|SHA256'
version = "5.41.3.0";
src =
if stdenv.hostPlatform.system == "x86_64-linux" then
fetchurl {
url = "${baseURL}/google-talkplugin_${version}-1_amd64.deb";
sha1 = "0bbc3d6997ba22ce712d93e5bc336c894b54fc81";
sha256 = "af7e23d2b6215afc547f96615b99f04e0561557cc58c0c9302364b5a3840d97d";
}
else if stdenv.hostPlatform.system == "i686-linux" then
fetchurl {
url = "${baseURL}/google-talkplugin_${version}-1_i386.deb";
sha1 = "6eae0544858f85c68b0cc46d7786e990bd94f139";
sha256 = "4c46d2b7f2018640288cd7ac49adc47e309d0beadfd979eb03030e672016b4a7";
}
else throw "Google Talk does not support your platform.";

View File

@ -21,12 +21,12 @@ let
in mkDerivationWith python3Packages.buildPythonApplication rec {
pname = "qutebrowser";
version = "1.8.3";
version = "1.9.0";
# the release tarballs are different from the git checkout!
src = fetchurl {
url = "https://github.com/qutebrowser/qutebrowser/releases/download/v${version}/${pname}-${version}.tar.gz";
sha256 = "055zmzk3q0m3hx1742nfy2mdawfllrkvijnbzp1hiv01dj1bxaf8";
sha256 = "1y0yq1qfr6g1s7kf3w2crd0b025dv2dfknhlz3v0001ns3rgwj17";
};
# Needs tox

View File

@ -1,29 +1,20 @@
diff --git a/qutebrowser/app.py b/qutebrowser/app.py
index 2b6896b76..ee05f379d 100644
index a47b5d2f4..f23ee23ef 100644
--- a/qutebrowser/app.py
+++ b/qutebrowser/app.py
@@ -555,22 +555,8 @@ class Quitter:
args: The commandline as a list of strings.
cwd: The current working directory as a string.
@@ -573,13 +573,8 @@ class Quitter(QObject):
Return:
The commandline as a list of strings.
"""
- if os.path.basename(sys.argv[0]) == 'qutebrowser':
- # Launched via launcher script
- args = [sys.argv[0]]
- cwd = None
- elif hasattr(sys, 'frozen'):
- args = [sys.executable]
- cwd = os.path.abspath(os.path.dirname(sys.executable))
- else:
- args = [sys.executable, '-m', 'qutebrowser']
- cwd = os.path.join(
- os.path.abspath(os.path.dirname(qutebrowser.__file__)), '..')
- if not os.path.isdir(cwd):
- # Probably running from a python egg. Let's fallback to
- # cwd=None and see if that works out.
- # See https://github.com/qutebrowser/qutebrowser/issues/323
- cwd = None
+ args = ['@qutebrowser@']
+ cwd = None
# Add all open pages so they get reopened.
page_args = []
page_args = [] # type: typing.MutableSequence[str]

View File

@ -54,9 +54,6 @@
# Extra preferences
, extraPrefs ? ""
# For meta
, tor-browser-bundle
}:
with stdenv.lib;
@ -93,19 +90,19 @@ let
fteLibPath = makeLibraryPath [ stdenv.cc.cc gmp ];
# Upstream source
version = "9.0.2";
version = "9.0.4";
lang = "en-US";
srcs = {
x86_64-linux = fetchurl {
url = "https://dist.torproject.org/torbrowser/${version}/tor-browser-linux64-${version}_${lang}.tar.xz";
sha256 = "1xdnqphsj7wzwyv927jwd3fi36srx0minydwl5jg5yyd3m3if9hb";
sha256 = "14zlf02i447hcdr4qap8af1k4aziznfp9m2ygqz05zsy8icm1j2k";
};
i686-linux = fetchurl {
url = "https://dist.torproject.org/torbrowser/${version}/tor-browser-linux32-${version}_${lang}.tar.xz";
sha256 = "1qk9fg5dvyyvbngsqla00by8a974mpvq9pnm2djif54lr2nfivwf";
sha256 = "1bmih91gsh698fp2mbnjcq8vmwhg822wanmn99r0xhkmgpi4zw2s";
};
};
in
@ -394,7 +391,15 @@ stdenv.mkDerivation rec {
meta = with stdenv.lib; {
description = "Tor Browser Bundle built by torproject.org";
longDescription = tor-browser-bundle.meta.longDescription;
longDescription = ''
Tor Browser Bundle is a bundle of the Tor daemon, Tor Browser (a heavily patched version of
Firefox), several essential extensions for Tor Browser, and some tools that glue those
together with a convenient UI.
The `tor-browser-bundle-bin` package is the official version built by torproject.org, patched
with `patchelf` to work under Nix and with bundled scripts adapted to the read-only nature
of the `/nix/store`.
'';
homepage = "https://www.torproject.org/";
platforms = attrNames srcs;
maintainers = with maintainers; [ offline matejc doublec thoughtpolice joachifm hax404 cap ];
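The longDescription above says the official build is "patched with patchelf to work under Nix"; as an illustrative, hedged sketch of what that usually means for a prebuilt binary (not taken from this diff, and the real expression derives its library path from its actual inputs):

  # Hypothetical: make the prebuilt binary find the Nix dynamic linker and libraries
  patchelf \
    --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
    --set-rpath "$libPath" \
    Browser/firefox

Here $libPath stands in for a makeLibraryPath of the bundle's runtime dependencies, as seen elsewhere in these expressions.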

View File

@ -1,345 +0,0 @@
{ stdenv
, fetchgit
, fetchurl
, symlinkJoin
, tor
, tor-browser-unwrapped
# Wrapper runtime
, coreutils
, hicolor-icon-theme
, shared-mime-info
, noto-fonts
, noto-fonts-emoji
# Audio support
, audioSupport ? mediaSupport
, apulse
# Media support (implies audio support)
, mediaSupport ? false
, ffmpeg
# Extensions, common
, zip
# HTTPS Everywhere
, git
, libxml2 # xmllint
, python27
, python27Packages
, rsync
# Pluggable transports
, obfs4
# Customization
, extraPrefs ? ""
, extraExtensions ? [ ]
}:
with stdenv.lib;
let
tor-browser-build_src = fetchgit {
url = "https://git.torproject.org/builders/tor-browser-build.git";
rev = "refs/tags/tbb-7.5a5-build5";
sha256 = "0j37mqldj33fnzghxifvy6v8vdwkcz0i4z81prww64md5s8qcsa9";
};
firefoxExtensions = import ./extensions.nix {
inherit stdenv fetchurl fetchgit zip
git libxml2 python27 python27Packages rsync;
};
bundledExtensions = with firefoxExtensions; [
https-everywhere
noscript
torbutton
tor-launcher
] ++ extraExtensions;
fontsEnv = symlinkJoin {
name = "tor-browser-fonts";
paths = [ noto-fonts noto-fonts-emoji ];
};
fontsDir = "${fontsEnv}/share/fonts";
mediaLibPath = makeLibraryPath [
ffmpeg
];
in
stdenv.mkDerivation {
pname = "tor-browser-bundle";
version = tor-browser-unwrapped.version;
buildInputs = [ tor-browser-unwrapped tor ];
dontUnpack = true;
buildPhase = ":";
# The following creates a customized firefox distribution. For
# simplicity, we copy the entire base firefox runtime, to work around
# firefox's annoying insistence on resolving the installation directory
# relative to the real firefox executable. A little tacky and
# inefficient but it works.
installPhase = ''
TBBUILD=${tor-browser-build_src}/projects/tor-browser
TBDATA_PATH=TorBrowser-Data
self=$out/lib/tor-browser
mkdir -p $self && cd $self
TBDATA_IN_STORE=$self/$TBDATA_PATH
cp -dR ${tor-browser-unwrapped}/lib"/"*"/"* .
chmod -R +w .
# Prepare for autoconfig
cat >defaults/pref/autoconfig.js <<EOF
pref("general.config.filename", "mozilla.cfg");
pref("general.config.obscure_value", 0);
EOF
# Hardcoded configuration
cat >mozilla.cfg <<EOF
// First line must be a comment
// Always update via Nixpkgs
lockPref("app.update.auto", false);
lockPref("app.update.enabled", false);
lockPref("extensions.update.autoUpdateDefault", false);
lockPref("extensions.update.enabled", false);
lockPref("extensions.torbutton.updateNeeded", false);
lockPref("extensions.torbutton.versioncheck_enabled", false);
// Where to find the Nixpkgs tor executable & config
lockPref("extensions.torlauncher.tor_path", "${tor}/bin/tor");
lockPref("extensions.torlauncher.torrc-defaults_path", "$TBDATA_IN_STORE/torrc-defaults");
// Captures store paths
clearPref("extensions.xpiState");
clearPref("extensions.bootstrappedAddons");
// Insist on using IPC for communicating with Tor
lockPref("extensions.torlauncher.control_port_use_ipc", true);
lockPref("extensions.torlauncher.socks_port_use_ipc", true);
// Allow sandbox access to sound devices if using ALSA directly
${if audioSupport then ''
pref("security.sandbox.content.write_path_whitelist", "/dev/snd/");
'' else ''
clearPref("security.sandbox.content.write_path_whitelist");
''}
// User customization
${extraPrefs}
EOF
# Preload extensions
find ${toString bundledExtensions} -name '*.xpi' -exec ln -s -t browser/extensions '{}' '+'
# Copy bundle data
bundlePlatform=linux
bundleData=$TBBUILD/Bundle-Data
mkdir -p $TBDATA_PATH
cat \
$bundleData/$bundlePlatform/Data/Tor/torrc-defaults \
>> $TBDATA_PATH/torrc-defaults
cat \
$bundleData/$bundlePlatform/Data/Browser/profile.default/preferences/extension-overrides.js \
$bundleData/PTConfigs/bridge_prefs.js \
>> defaults/pref/extension-overrides.js
# Configure geoip
#
# tor-launcher insists on resolving geoip data relative to torrc-defaults
# (and passes them directly on the tor command-line).
#
# Write the paths into torrc-defaults anyway, otherwise they'll be
# captured in the runtime torrc.
ln -s -t $TBDATA_PATH ${tor.geoip}/share/tor/geoip{,6}
cat >>$TBDATA_PATH/torrc-defaults <<EOF
GeoIPFile $TBDATA_IN_STORE/geoip
GeoIPv6File $TBDATA_IN_STORE/geoip6
EOF
# Configure pluggable transports
substituteInPlace $TBDATA_PATH/torrc-defaults \
--replace "./TorBrowser/Tor/PluggableTransports/obfs4proxy" \
"${obfs4}/bin/obfs4proxy"
# Hard-code path to TBB fonts; xref: FONTCONFIG_FILE in the wrapper below
sed $bundleData/$bundlePlatform/Data/fontconfig/fonts.conf \
-e "s,<dir>fonts</dir>,<dir>${fontsDir}</dir>," \
> $TBDATA_PATH/fonts.conf
# Generate a suitable wrapper
wrapper_PATH=${makeBinPath [ coreutils ]}
wrapper_XDG_DATA_DIRS=${concatMapStringsSep ":" (x: "${x}/share") [
hicolor-icon-theme
shared-mime-info
]}
${optionalString audioSupport ''
# apulse uses a non-standard library path ...
wrapper_LD_LIBRARY_PATH=${apulse}/lib/apulse''${wrapper_LD_LIBRARY_PATH:+:$wrapper_LD_LIBRARY_PATH}
''}
${optionalString mediaSupport ''
wrapper_LD_LIBRARY_PATH=${mediaLibPath}''${wrapper_LD_LIBRARY_PATH:+:$wrapper_LD_LIBRARY_PATH}
''}
mkdir -p $out/bin
cat >$out/bin/tor-browser <<EOF
#! ${stdenv.shell} -eu
umask 077
PATH=$wrapper_PATH
readonly THE_HOME=\$HOME
TBB_HOME=\''${TBB_HOME:-\''${XDG_DATA_HOME:-\$HOME/.local/share}/tor-browser}
if [[ \''${TBB_HOME:0:1} != / ]] ; then
TBB_HOME=\$PWD/\$TBB_HOME
fi
readonly TBB_HOME
# Basic sanity check: never want to vomit directly onto user's homedir
if [[ "\$TBB_HOME" = "\$THE_HOME" ]] ; then
echo 'TBB_HOME=\$HOME; refusing to run' >&2
exit 1
fi
mkdir -p "\$TBB_HOME"
HOME=\$TBB_HOME
cd "\$HOME"
# Re-init XDG basedir envvars
XDG_CACHE_HOME=\$HOME/.cache
XDG_CONFIG_HOME=\$HOME/.config
XDG_DATA_HOME=\$HOME/.local/share
# Initialize empty TBB runtime state directory hierarchy. Mirror the
# layout used by the official TBB, to avoid the hassle of working
# against the assumptions made by tor-launcher & co.
mkdir -p "\$HOME/TorBrowser" "\$HOME/TorBrowser/Data"
# Initialize the Tor data directory.
mkdir -p "\$HOME/TorBrowser/Data/Tor"
# TBB fails if ownership is too permissive
chmod 0700 "\$HOME/TorBrowser/Data/Tor"
# Initialize the browser profile state. Expect TBB to generate all data.
mkdir -p "\$HOME/TorBrowser/Data/Browser/profile.default"
# Files that capture store paths; re-generated by firefox at startup
rm -rf "\$HOME/TorBrowser/Data/Browser/profile.default"/{compatibility.ini,extensions.ini,extensions.json,startupCache}
# Clear out fontconfig caches
rm -f "\$HOME/.cache/fontconfig/"*.cache-*
# Lift-off!
#
# TZ is set to avoid stat()ing /etc/localtime over and over ...
#
# DBUS_SESSION_BUS_ADDRESS is inherited to avoid auto-launching a new
# dbus instance; to prevent using the session bus, set the envvar to
# an empty/invalid value prior to running tor-browser.
#
# FONTCONFIG_FILE is required to make fontconfig read the TBB
# fonts.conf; upstream uses FONTCONFIG_PATH, but FC_DEBUG=1024
# indicates the system fonts.conf being used instead.
#
# HOME, TMPDIR, XDG_*_HOME are set as a form of soft confinement;
# ideally, tor-browser should not write to any path outside TBB_HOME
# and should run even under strict confinement to TBB_HOME.
#
# XDG_DATA_DIRS is set to prevent searching system directories for
# mime and icon data.
#
# PULSE_{SERVER,COOKIE} is necessary for audio playback w/pulseaudio
#
# APULSE_PLAYBACK_DEVICE is for audio playback w/o pulseaudio (no capture yet)
#
# TOR_* is for using an external tor instance
#
# Parameters lacking a default value below are *required* (enforced by
# -o nounset).
exec env -i \
LD_LIBRARY_PATH=$wrapper_LD_LIBRARY_PATH \
\
TZ=":" \
\
DISPLAY="\$DISPLAY" \
XAUTHORITY="\''${XAUTHORITY:-}" \
DBUS_SESSION_BUS_ADDRESS="\$DBUS_SESSION_BUS_ADDRESS" \
\
HOME="\$HOME" \
TMPDIR="\$XDG_CACHE_HOME/tmp" \
XDG_CONFIG_HOME="\$XDG_CONFIG_HOME" \
XDG_DATA_HOME="\$XDG_DATA_HOME" \
XDG_CACHE_HOME="\$XDG_CACHE_HOME" \
XDG_RUNTIME_DIR="\$HOME/run" \
\
XDG_DATA_DIRS="$wrapper_XDG_DATA_DIRS" \
\
FONTCONFIG_FILE="$TBDATA_IN_STORE/fonts.conf" \
\
APULSE_PLAYBACK_DEVICE="\''${APULSE_PLAYBACK_DEVICE:-plug:dmix}" \
\
TOR_SKIP_LAUNCH="\''${TOR_SKIP_LAUNCH:-}" \
TOR_CONTROL_PORT="\''${TOR_CONTROL_PORT:-}" \
TOR_SOCKS_PORT="\''${TOR_SOCKS_PORT:-}" \
\
$self/firefox \
-no-remote \
-profile "\$HOME/TorBrowser/Data/Browser/profile.default" \
"\$@"
EOF
chmod +x $out/bin/tor-browser
echo "Syntax checking wrapper ..."
bash -n $out/bin/tor-browser
echo "Checking wrapper ..."
DISPLAY="" XAUTHORITY="" DBUS_SESSION_BUS_ADDRESS="" TBB_HOME=$(mktemp -d) \
$out/bin/tor-browser -version >/dev/null
'';
passthru.execdir = "/bin";
meta = with stdenv.lib; {
description = "An unofficial version of the Tor Browser Bundle, built from source";
longDescription = ''
Tor Browser Bundle is a bundle of the Tor daemon, Tor Browser (heavily patched version of
Firefox), several essential extensions for Tor Browser, and some tools that glue those
together with a convenient UI.
`tor-browser-bundle-bin` package is the official version built by torproject.org patched with
`patchelf` to work under nix and with bundled scripts adapted to the read-only nature of
the `/nix/store`.
`tor-browser-bundle` package is the version built completely from source. It reuses the `tor`
package for the tor daemon, `firefoxPackages.tor-browser` package for the tor-browser, and
builds all the extensions from source.
Note that `tor-browser-bundle` package is not only built from source, but also bundles Tor
Browser differently from the official `tor-browser-bundle-bin` implementation. The official
Tor Browser is not a normal UNIX program and is heavily patched for its use in the Tor Browser
Bundle (which `tor-browser-bundle-bin` package then has to work around for the read-only
/nix/store). Meanwhile, `firefoxPackages.tor-browser` reverts all those patches, allowing
`firefoxPackages.tor-browser` to be used independently of the bundle, and then implements what
`tor-browser-bundle` needs for the bundling using a much simpler patch. See the
longDescription and expression of the `firefoxPackages.tor-browser` package for more info.
'';
inherit (tor-browser-unwrapped.meta) homepage platforms license;
hydraPlatforms = [ ];
maintainers = with maintainers; [ joachifm ];
};
}

View File

@ -17,11 +17,11 @@ let
vivaldiName = if isSnapshot then "vivaldi-snapshot" else "vivaldi";
in stdenv.mkDerivation rec {
pname = "vivaldi";
version = "2.10.1745.23-1";
version = "2.10.1745.26-1";
src = fetchurl {
url = "https://downloads.vivaldi.com/${branch}/vivaldi-${branch}_${version}_amd64.deb";
sha256 = "1dkyanasycarka6zikrk0pn6n0xin7hrnysm67rs7pam4lzpq0vh";
sha256 = "0zl5sqa60x9yg7acp6vxgnmfzz27v849mlpp1wgnwh019fx3wf53";
};
unpackPhase = ''

View File

@ -14,6 +14,7 @@
, libpeas
, dbus
, vala
, wrapGAppsHook
, xorg
, xvfb_run
, libxml2
@ -21,13 +22,13 @@
stdenv.mkDerivation rec {
pname = "calls";
version = "unstable-2019-10-29";
version = "0.1.1";
src = fetchFromGitLab {
domain = "source.puri.sm";
owner = "Librem5";
repo = "calls";
rev = "9fe575053d8f01c3a76a6c20d39f0816166d5afd";
rev = "v${version}";
sha256 = "01inx4mvrzvklwrfryw5hw9p89v8cn78m3qmv97g7a3v0h5c0n35";
};
@ -37,6 +38,7 @@ stdenv.mkDerivation rec {
pkgconfig
desktop-file-utils
vala
wrapGAppsHook
];
buildInputs = [

View File

@ -112,8 +112,8 @@ in rec {
terraform_0_11-full = terraform_0_11.full;
terraform_0_12 = pluggable (generic {
version = "0.12.18";
sha256 = "1p2rvs9dw2rzzggf3q2lifwbd82b7xb3jpb4yz5nmggn5g22qlc1";
version = "0.12.19";
sha256 = "067gzxysz8r2myj3rh0vwrs0pmbgb21jxlmawlf4v0lkjnhj6kwv";
patches = [ ./provider-path.patch ];
passthru = { inherit plugins; };
});

Some files were not shown because too many files have changed in this diff.