Merge staging-next into staging

commit e076f677a1

.github/CODEOWNERS (vendored)

@@ -28,6 +28,7 @@
 /lib/cli.nix @edolstra @nbp @Profpatsch
 /lib/debug.nix @edolstra @nbp @Profpatsch
 /lib/asserts.nix @edolstra @nbp @Profpatsch
+/lib/path.* @infinisil @fricklerhandwerk

 # Nixpkgs Internals
 /default.nix @nbp

@@ -12,6 +12,7 @@ let
 { name = "lists"; description = "list manipulation functions"; }
 { name = "debug"; description = "debugging functions"; }
 { name = "options"; description = "NixOS / nixpkgs option handling"; }
+{ name = "path"; description = "path functions"; }
 { name = "filesystem"; description = "filesystem functions"; }
 { name = "sources"; description = "source filtering functions"; }
 { name = "cli"; description = "command-line serialization functions"; }

@@ -10,7 +10,11 @@ with pkgs; stdenv.mkDerivation {
 installPhase = ''
 function docgen {
 # TODO: wrap lib.$1 in <literal>, make nixdoc not escape it
+if [[ -e "../lib/$1.nix" ]]; then
 nixdoc -c "$1" -d "lib.$1: $2" -f "$1.nix" > "$out/$1.xml"
+else
+nixdoc -c "$1" -d "lib.$1: $2" -f "$1/default.nix" > "$out/$1.xml"
+fi
 echo "<xi:include href='$1.xml' />" >> "$out/index.xml"
 }

@@ -2,19 +2,21 @@
 let
 revision = pkgs.lib.trivial.revisionWithDefault (nixpkgs.revision or "master");

-libDefPos = set:
-builtins.map
-(name: {
-name = name;
+libDefPos = prefix: set:
+builtins.concatMap
+(name: [{
+name = builtins.concatStringsSep "." (prefix ++ [name]);
 location = builtins.unsafeGetAttrPos name set;
-})
-(builtins.attrNames set);
+}] ++ nixpkgsLib.optionals
+(builtins.length prefix == 0 && builtins.isAttrs set.${name})
+(libDefPos (prefix ++ [name]) set.${name})
+) (builtins.attrNames set);

 libset = toplib:
 builtins.map
 (subsetname: {
 subsetname = subsetname;
-functions = libDefPos toplib.${subsetname};
+functions = libDefPos [] toplib.${subsetname};
 })
 (builtins.map (x: x.name) libsets);
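The `libDefPos` change in the hunk above makes the function-location pass descend one level into nested attribute sets, so functions in the new `lib.path` subset get dotted names such as `subpath.normalise`. A minimal self-contained sketch of that flattening idea (illustrative only, not the code from this commit; `flattenNames` and the example attribute set are made up, and locations are omitted):

let
  # Collect attribute names, going one level deep into nested sets,
  # mirroring the prefix/concatMap structure of libDefPos above.
  flattenNames = prefix: set:
    builtins.concatMap
      (name:
        [ (builtins.concatStringsSep "." (prefix ++ [ name ])) ]
        ++ (if builtins.length prefix == 0 && builtins.isAttrs set.${name}
            then flattenNames (prefix ++ [ name ]) set.${name}
            else [ ]))
      (builtins.attrNames set);
in
  flattenNames [ ] { debug = { traceIf = c: v: v; }; version = "1.0"; }
  # evaluates to [ "debug" "debug.traceIf" "version" ]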
@@ -570,7 +570,13 @@ test run would be:

 ```
 checkInputs = [ pytest ];
-checkPhase = "pytest";
+checkPhase = ''
+runHook preCheck
+
+pytest
+
+runHook postCheck
+'';
 ```

 However, many repositories' test suites do not translate well to nix's build

@@ -582,7 +588,11 @@ To filter tests using pytest, one can do the following:
 checkInputs = [ pytest ];
 # avoid tests which need additional data or touch network
 checkPhase = ''
+runHook preCheck
+
 pytest tests/ --ignore=tests/integration -k 'not download and not update'
+
+runHook postCheck
 '';
 ```

@@ -1408,7 +1418,11 @@ example of such a situation is when `py.test` is used.
 # assumes the tests are located in tests
 checkInputs = [ pytest ];
 checkPhase = ''
+runHook preCheck
+
 py.test -k 'not function_name and not other_function' tests
+
+runHook postCheck
 '';
 }
 ```

@@ -27,7 +27,6 @@ let
 maintainers = import ../maintainers/maintainer-list.nix;
 teams = callLibs ../maintainers/team-list.nix;
 meta = callLibs ./meta.nix;
-sources = callLibs ./sources.nix;
 versions = callLibs ./versions.nix;

 # module system

@@ -53,7 +52,9 @@ let
 fetchers = callLibs ./fetchers.nix;

 # Eval-time filesystem handling
+path = callLibs ./path;
 filesystem = callLibs ./filesystem.nix;
+sources = callLibs ./sources.nix;

 # back-compat aliases
 platforms = self.systems.doubles;
lib/path/README.md (new file, 196 lines)

# Path library

This document explains why the `lib.path` library is designed the way it is.

The purpose of this library is to process [filesystem paths]. It does not read files from the filesystem.
It exists to support the native Nix [path value type] with extra functionality.

[filesystem paths]: https://en.m.wikipedia.org/wiki/Path_(computing)
[path value type]: https://nixos.org/manual/nix/stable/language/values.html#type-path

As an extension of the path value type, it inherits the same intended use cases and limitations:
- Only use paths to access files at evaluation time, such as the local project source.
- Paths cannot point to derivations, so they are unfit to represent dependencies.
- A path implicitly imports the referenced files into the Nix store when interpolated to a string. Therefore paths are not suitable to access files at build- or run-time, as you risk importing the path from the evaluation system instead.

Overall, this library works with two types of paths:
- Absolute paths are represented with the Nix [path value type]. Nix automatically normalises these paths.
- Subpaths are represented with the [string value type] since path value types don't support relative paths. This library normalises these paths as safely as possible. Absolute paths in strings are not supported.

A subpath refers to a specific file or directory within an absolute base directory.
It is a stricter form of a relative path, notably [without support for `..` components][parents] since those could escape the base directory.

[string value type]: https://nixos.org/manual/nix/stable/language/values.html#type-string

This library is designed to be as safe and intuitive as possible, throwing errors when operations are attempted that would produce surprising results, and giving the expected result otherwise.

This library is designed to work well as a dependency for the `lib.filesystem` and `lib.sources` library components. Contrary to these library components, `lib.path` does not read any paths from the filesystem.

This library makes only these assumptions about paths and no others:
- `dirOf path` returns the path to the parent directory of `path`, unless `path` is the filesystem root, in which case `path` is returned.
- There can be multiple filesystem roots: `p == dirOf p` and `q == dirOf q` does not imply `p == q`.
- While there's only a single filesystem root in stable Nix, the [lazy trees feature](https://github.com/NixOS/nix/pull/6530) introduces [additional filesystem roots](https://github.com/NixOS/nix/pull/6530#discussion_r1041442173).
- `path + ("/" + string)` returns the path to the `string` subdirectory in `path`.
- If `string` contains no `/` characters, then `dirOf (path + ("/" + string)) == path`.
- If `string` contains no `/` characters, then `baseNameOf (path + ("/" + string)) == string`.
- `path1 == path2` returns `true` only if `path1` points to the same filesystem path as `path2`.

Notably we do not make the assumption that we can turn paths into strings using `toString path`.

## Design decisions

Each subsection here contains a decision along with arguments and counter-arguments for (+) and against (-) that decision.

### Leading dots for relative paths
[leading-dots]: #leading-dots-for-relative-paths

Observing: Since subpaths are a form of relative paths, they can have a leading `./` to indicate it being a relative path, this is generally not necessary for tools though.

Considering: Paths should be as explicit, consistent and unambiguous as possible.

Decision: Returned subpaths should always have a leading `./`.

<details>
<summary>Arguments</summary>

- (+) In shells, just running `foo` as a command wouldn't execute the file `foo`, whereas `./foo` would execute the file. In contrast, `foo/bar` does execute that file without the need for `./`. This can lead to confusion about when a `./` needs to be prefixed. If a `./` is always included, this becomes a non-issue. This effectively then means that paths don't overlap with command names.
- (+) Prepending with `./` makes the subpaths always valid as relative Nix path expressions.
- (+) Using paths in command line arguments could give problems if not escaped properly, e.g. if a path was `--version`. This is not a problem with `./--version`. This effectively then means that paths don't overlap with GNU-style command line options.
- (-) `./` is not required to resolve relative paths, resolution always has an implicit `./` as prefix.
- (-) It's less noisy without the `./`, e.g. in error messages.
- (+) But similarly, it could be confusing whether something was even a path.
  e.g. `foo` could be anything, but `./foo` is more clearly a path.
- (+) Makes it more uniform with absolute paths (those always start with `/`).
- (-) That is not relevant for practical purposes.
- (+) `find` also outputs results with `./`.
- (-) But only if you give it an argument of `.`. If you give it the argument `some-directory`, it won't prefix that.
- (-) `realpath --relative-to` doesn't prefix relative paths with `./`.
- (+) There is no need to return the same result as `realpath`.

</details>

### Representation of the current directory
[curdir]: #representation-of-the-current-directory

Observing: The subpath that produces the base directory can be represented with `.` or `./` or `./.`.

Considering: Paths should be as consistent and unambiguous as possible.

Decision: It should be `./.`.

<details>
<summary>Arguments</summary>

- (+) `./` would be inconsistent with [the decision to not persist trailing slashes][trailing-slashes].
- (-) `.` is how `realpath` normalises paths.
- (+) `.` can be interpreted as a shell command (it's a builtin for sourcing files in `bash` and `zsh`).
- (+) `.` would be the only path without a `/`. It could not be used as a Nix path expression, since those require at least one `/` to be parsed as such.
- (-) `./.` is rather long.
- (-) We don't require users to type this though, as it's only output by the library.
  As inputs all three variants are supported for subpaths (and we can't do anything about absolute paths)
- (-) `builtins.dirOf "foo" == "."`, so `.` would be consistent with that.
- (+) `./.` is consistent with the [decision to have leading `./`][leading-dots].
- (+) `./.` is a valid Nix path expression, although this property does not hold for every relative path or subpath.

</details>

### Subpath representation
[relrepr]: #subpath-representation

Observing: Subpaths such as `foo/bar` can be represented in various ways:
- string: `"foo/bar"`
- list with all the components: `[ "foo" "bar" ]`
- attribute set: `{ type = "relative-path"; components = [ "foo" "bar" ]; }`

Considering: Paths should be as safe to use as possible. We should generate string outputs in the library and not encourage users to do that themselves.

Decision: Paths are represented as strings.

<details>
<summary>Arguments</summary>

- (+) It's simpler for the users of the library. One doesn't have to convert a path a string before it can be used.
- (+) Naively converting the list representation to a string with `concatStringsSep "/"` would break for `[]`, requiring library users to be more careful.
- (+) It doesn't encourage people to do their own path processing and instead use the library.
  With a list representation it would seem easy to just use `lib.lists.init` to get the parent directory, but then it breaks for `.`, which would be represented as `[ ]`.
- (+) `+` is convenient and doesn't work on lists and attribute sets.
- (-) Shouldn't use `+` anyways, we export safer functions for path manipulation.

</details>

### Parent directory
[parents]: #parent-directory

Observing: Relative paths can have `..` components, which refer to the parent directory.

Considering: Paths should be as safe and unambiguous as possible.

Decision: `..` path components in string paths are not supported, neither as inputs nor as outputs. Hence, string paths are called subpaths, rather than relative paths.

<details>
<summary>Arguments</summary>

- (+) If we wanted relative paths to behave according to the "physical" interpretation (as a directory tree with relations between nodes), it would require resolving symlinks, since e.g. `foo/..` would not be the same as `.` if `foo` is a symlink.
- (-) The "logical" interpretation is also valid (treating paths as a sequence of names), and is used by some software. It is simpler, and not using symlinks at all is safer.
- (+) Mixing both models can lead to surprises.
- (+) We can't resolve symlinks without filesystem access.
- (+) Nix also doesn't support reading symlinks at evaluation time.
- (-) We could just not handle such cases, e.g. `equals "foo" "foo/bar/.. == false`. The paths are different, we don't need to check whether the paths point to the same thing.
- (+) Assume we said `relativeTo /foo /bar == "../bar"`. If this is used like `/bar/../foo` in the end, and `bar` turns out to be a symlink to somewhere else, this won't be accurate.
- (-) We could decide to not support such ambiguous operations, or mark them as such, e.g. the normal `relativeTo` will error on such a case, but there could be `extendedRelativeTo` supporting that.
- (-) `..` are a part of paths, a path library should therefore support it.
- (+) If we can convincingly argue that all such use cases are better done e.g. with runtime tools, the library not supporting it can nudge people towards using those.
- (-) We could allow "..", but only in the prefix.
- (+) Then we'd have to throw an error for doing `append /some/path "../foo"`, making it non-composable.
- (+) The same is for returning paths with `..`: `relativeTo /foo /bar => "../bar"` would produce a non-composable path.
- (+) We argue that `..` is not needed at the Nix evaluation level, since we'd always start evaluation from the project root and don't go up from there.
- (+) `..` is supported in Nix paths, turning them into absolute paths.
- (-) This is ambiguous in the presence of symlinks.
- (+) If you need `..` for building or runtime, you can use build-/run-time tooling to create those (e.g. `realpath` with `--relative-to`), or use absolute paths instead.
  This also gives you the ability to correctly handle symlinks.

</details>

### Trailing slashes
[trailing-slashes]: #trailing-slashes

Observing: Subpaths can contain trailing slashes, like `foo/`, indicating that the path points to a directory and not a file.

Considering: Paths should be as consistent as possible, there should only be a single normalisation for the same path.

Decision: All functions remove trailing slashes in their results.

<details>
<summary>Arguments</summary>

- (+) It allows normalisations to be unique, in that there's only a single normalisation for the same path. If trailing slashes were preserved, both `foo/bar` and `foo/bar/` would be valid but different normalisations for the same path.
- Comparison to other frameworks to figure out the least surprising behavior:
  - (+) Nix itself doesn't support trailing slashes when parsing and doesn't preserve them when appending paths.
  - (-) [Rust's std::path](https://doc.rust-lang.org/std/path/index.html) does preserve them during [construction](https://doc.rust-lang.org/std/path/struct.Path.html#method.new).
  - (+) Doesn't preserve them when returning individual [components](https://doc.rust-lang.org/std/path/struct.Path.html#method.components).
  - (+) Doesn't preserve them when [canonicalizing](https://doc.rust-lang.org/std/path/struct.Path.html#method.canonicalize).
  - (+) [Python 3's pathlib](https://docs.python.org/3/library/pathlib.html#module-pathlib) doesn't preserve them during [construction](https://docs.python.org/3/library/pathlib.html#pathlib.PurePath).
  - Notably it represents the individual components as a list internally.
  - (-) [Haskell's filepath](https://hackage.haskell.org/package/filepath-1.4.100.0) has [explicit support](https://hackage.haskell.org/package/filepath-1.4.100.0/docs/System-FilePath.html#g:6) for handling trailing slashes.
  - (-) Does preserve them for [normalisation](https://hackage.haskell.org/package/filepath-1.4.100.0/docs/System-FilePath.html#v:normalise).
  - (-) [NodeJS's Path library](https://nodejs.org/api/path.html) preserves trailing slashes for [normalisation](https://nodejs.org/api/path.html#pathnormalizepath).
  - (+) For [parsing a path](https://nodejs.org/api/path.html#pathparsepath) into its significant elements, trailing slashes are not preserved.
- (+) Nix's builtin function `dirOf` gives an unexpected result for paths with trailing slashes: `dirOf "foo/bar/" == "foo/bar"`.
  Inconsistently, `baseNameOf` works correctly though: `baseNameOf "foo/bar/" == "bar"`.
- (-) We are writing a path library to improve handling of paths though, so we shouldn't use these functions and discourage their use.
- (-) Unexpected result when normalising intermediate paths, like `relative.normalise ("foo" + "/") + "bar" == "foobar"`.
- (+) This is not a practical use case though.
- (+) Don't use `+` to append paths, this library has a `join` function for that.
- (-) Users might use `+` out of habit though.
- (+) The `realpath` command also removes trailing slashes.
- (+) Even with a trailing slash, the path is the same, it's only an indication that it's a directory.

</details>

## Other implementations and references

- [Rust](https://doc.rust-lang.org/std/path/struct.Path.html)
- [Python](https://docs.python.org/3/library/pathlib.html)
- [Haskell](https://hackage.haskell.org/package/filepath-1.4.100.0/docs/System-FilePath.html)
- [Nodejs](https://nodejs.org/api/path.html)
- [POSIX.1-2017](https://pubs.opengroup.org/onlinepubs/9699919799/nframe.html)
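Taken together, the decisions above fix a single normal form for subpaths: a leading `./`, single slashes, no trailing `/` or `/.`, and `./.` for the base directory itself. A short usage sketch of the resulting behaviour; the expected values are exactly the ones asserted by lib/path/tests/unit.nix further down, and obtaining `lib` via `import <nixpkgs/lib>` is just one assumed way to get a nixpkgs lib that already contains this commit:

let
  lib = import <nixpkgs/lib>;   # assumption: any way of importing a nixpkgs lib with lib.path works
  inherit (lib.path) subpath;
in {
  collapsed  = subpath.normalise "foo//bar";    # => "./foo/bar"  (leading ./, single slashes)
  noTrailing = subpath.normalise "foo/bar/.";   # => "./foo/bar"  (trailing / and /. removed)
  currentDir = subpath.normalise ".";           # => "./."        (current directory)
  rejected   = (builtins.tryEval (subpath.normalise "foo/../bar")).success;  # => false, `..` is not allowed
}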
lib/path/default.nix (new file, 218 lines)

# Functions for working with paths, see ./path.md
{ lib }:
let

  inherit (builtins)
    isString
    split
    match
    ;

  inherit (lib.lists)
    length
    head
    last
    genList
    elemAt
    ;

  inherit (lib.strings)
    concatStringsSep
    substring
    ;

  inherit (lib.asserts)
    assertMsg
    ;

  # Return the reason why a subpath is invalid, or `null` if it's valid
  subpathInvalidReason = value:
    if ! isString value then
      "The given value is of type ${builtins.typeOf value}, but a string was expected"
    else if value == "" then
      "The given string is empty"
    else if substring 0 1 value == "/" then
      "The given string \"${value}\" starts with a `/`, representing an absolute path"
    # We don't support ".." components, see ./path.md#parent-directory
    else if match "(.*/)?\\.\\.(/.*)?" value != null then
      "The given string \"${value}\" contains a `..` component, which is not allowed in subpaths"
    else null;

  # Split and normalise a relative path string into its components.
  # Error for ".." components and doesn't include "." components
  splitRelPath = path:
    let
      # Split the string into its parts using regex for efficiency. This regex
      # matches patterns like "/", "/./", "/././", with arbitrarily many "/"s
      # together. These are the main special cases:
      # - Leading "./" gets split into a leading "." part
      # - Trailing "/." or "/" get split into a trailing "." or ""
      #   part respectively
      #
      # These are the only cases where "." and "" parts can occur
      parts = split "/+(\\./+)*" path;

      # `split` creates a list of 2 * k + 1 elements, containing the k +
      # 1 parts, interleaved with k matches where k is the number of
      # (non-overlapping) matches. This calculation here gets the number of parts
      # back from the list length
      # floor( (2 * k + 1) / 2 ) + 1 == floor( k + 1/2 ) + 1 == k + 1
      partCount = length parts / 2 + 1;

      # To assemble the final list of components we want to:
      # - Skip a potential leading ".", normalising "./foo" to "foo"
      # - Skip a potential trailing "." or "", normalising "foo/" and "foo/." to
      #   "foo". See ./path.md#trailing-slashes
      skipStart = if head parts == "." then 1 else 0;
      skipEnd = if last parts == "." || last parts == "" then 1 else 0;

      # We can now know the length of the result by removing the number of
      # skipped parts from the total number
      componentCount = partCount - skipEnd - skipStart;

    in
      # Special case of a single "." path component. Such a case leaves a
      # componentCount of -1 due to the skipStart/skipEnd not verifying that
      # they don't refer to the same character
      if path == "." then []

      # Generate the result list directly. This is more efficient than a
      # combination of `filter`, `init` and `tail`, because here we don't
      # allocate any intermediate lists
      else genList (index:
        # To get to the element we need to add the number of parts we skip and
        # multiply by two due to the interleaved layout of `parts`
        elemAt parts ((skipStart + index) * 2)
      ) componentCount;

  # Join relative path components together
  joinRelPath = components:
    # Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
    "./" +
    # An empty string is not a valid relative path, so we need to return a `.` when we have no components
    (if components == [] then "." else concatStringsSep "/" components);

in /* No rec! Add dependencies on this file at the top. */ {

  /* Whether a value is a valid subpath string.

     - The value is a string

     - The string is not empty

     - The string doesn't start with a `/`

     - The string doesn't contain any `..` path components

     Type:
       subpath.isValid :: String -> Bool

     Example:
       # Not a string
       subpath.isValid null
       => false

       # Empty string
       subpath.isValid ""
       => false

       # Absolute path
       subpath.isValid "/foo"
       => false

       # Contains a `..` path component
       subpath.isValid "../foo"
       => false

       # Valid subpath
       subpath.isValid "foo/bar"
       => true

       # Doesn't need to be normalised
       subpath.isValid "./foo//bar/"
       => true
  */
  subpath.isValid = value:
    subpathInvalidReason value == null;

  /* Normalise a subpath. Throw an error if the subpath isn't valid, see
     `lib.path.subpath.isValid`

     - Limit repeating `/` to a single one

     - Remove redundant `.` components

     - Remove trailing `/` and `/.`

     - Add leading `./`

     Laws:

     - (Idempotency) Normalising multiple times gives the same result:

         subpath.normalise (subpath.normalise p) == subpath.normalise p

     - (Uniqueness) There's only a single normalisation for the paths that lead to the same file system node:

         subpath.normalise p != subpath.normalise q -> $(realpath ${p}) != $(realpath ${q})

     - Don't change the result when appended to a Nix path value:

         base + ("/" + p) == base + ("/" + subpath.normalise p)

     - Don't change the path according to `realpath`:

         $(realpath ${p}) == $(realpath ${subpath.normalise p})

     - Only error on invalid subpaths:

         (builtins.tryEval (subpath.normalise p)).success == subpath.isValid p

     Type:
       subpath.normalise :: String -> String

     Example:
       # limit repeating `/` to a single one
       subpath.normalise "foo//bar"
       => "./foo/bar"

       # remove redundant `.` components
       subpath.normalise "foo/./bar"
       => "./foo/bar"

       # add leading `./`
       subpath.normalise "foo/bar"
       => "./foo/bar"

       # remove trailing `/`
       subpath.normalise "foo/bar/"
       => "./foo/bar"

       # remove trailing `/.`
       subpath.normalise "foo/bar/."
       => "./foo/bar"

       # Return the current directory as `./.`
       subpath.normalise "."
       => "./."

       # error on `..` path components
       subpath.normalise "foo/../bar"
       => <error>

       # error on empty string
       subpath.normalise ""
       => <error>

       # error on absolute path
       subpath.normalise "/foo"
       => <error>
  */
  subpath.normalise = path:
    assert assertMsg (subpathInvalidReason path == null)
      "lib.path.subpath.normalise: Argument is not a valid subpath string: ${subpathInvalidReason path}";
    joinRelPath (splitRelPath path);

}
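The comments in `splitRelPath` above rely on the layout that `builtins.split` produces: k matches yield a list of 2·k + 1 elements, with the k + 1 unmatched parts interleaved with the k capture-group lists. An illustrative evaluation of that layout (not taken from the commit; the concrete result shown is what the documented layout implies for this input):

builtins.split "/+(\\./+)*" "foo//./bar/"
# => [ "foo" [ "./" ] "bar" [ null ] "" ]
# Two matches ("//./" and "/") give 2 * 2 + 1 = 5 elements; the strings at even
# indices ("foo", "bar", "") are the parts, so partCount = 5 / 2 + 1 = 3, and
# skipping the trailing "" part leaves the components [ "foo" "bar" ].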
lib/path/tests/default.nix (new file, 34 lines)

{
  nixpkgs ? ../../..,
  system ? builtins.currentSystem,
  pkgs ? import nixpkgs {
    config = {};
    overlays = [];
    inherit system;
  },
  libpath ? ../..,
  # Random seed
  seed ? null,
}:
pkgs.runCommand "lib-path-tests" {
  nativeBuildInputs = with pkgs; [
    nix
    jq
    bc
  ];
} ''
  # Needed to make Nix evaluation work
  export NIX_STATE_DIR=$(mktemp -d)

  cp -r ${libpath} lib
  export TEST_LIB=$PWD/lib

  echo "Running unit tests lib/path/tests/unit.nix"
  nix-instantiate --eval lib/path/tests/unit.nix \
    --argstr libpath "$TEST_LIB"

  echo "Running property tests lib/path/tests/prop.sh"
  bash lib/path/tests/prop.sh ${toString seed}

  touch $out
''

lib/path/tests/generate.awk (new file, 64 lines)

# Generate random path-like strings, separated by null characters.
#
# Invocation:
#
#   awk -f ./generate.awk -v <variable>=<value> | tr '\0' '\n'
#
# Customizable variables (all default to 0):
# - seed: Deterministic random seed to use for generation
# - count: Number of paths to generate
# - extradotweight: Give extra weight to dots being generated
# - extraslashweight: Give extra weight to slashes being generated
# - extranullweight: Give extra weight to null being generated, making paths shorter
BEGIN {
  # Random seed, passed explicitly for reproducibility
  srand(seed)

  # Don't include special characters below 32
  minascii = 32
  # Don't include DEL at 128
  maxascii = 127
  upperascii = maxascii - minascii

  # add extra weight for ., in addition to the one weight from the ascii range
  upperdot = upperascii + extradotweight

  # add extra weight for /, in addition to the one weight from the ascii range
  upperslash = upperdot + extraslashweight

  # add extra weight for null, indicating the end of the string
  # Must be at least 1 to have strings end at all
  total = upperslash + 1 + extranullweight

  # new=1 indicates that it's a new string
  new=1
  while (count > 0) {

    # Random integer between [0, total)
    value = int(rand() * total)

    if (value < upperascii) {
      # Ascii range
      printf("%c", value + minascii)
      new=0

    } else if (value < upperdot) {
      # Dot range
      printf "."
      new=0

    } else if (value < upperslash) {
      # If it's the start of a new path, only generate a / in 10% of cases
      # This is always an invalid subpath, which is not a very interesting case
      if (new && rand() > 0.1) continue
      printf "/"

    } else {
      # Do not generate empty strings
      if (new) continue
      printf "\x00"
      count--
      new=1
    }
  }
}
lib/path/tests/prop.nix (new file, 60 lines)

# Given a list of path-like strings, check some properties of the path library
# using those paths and return a list of attribute sets of the following form:
#
#   { <string> = <lib.path.subpath.normalise string>; }
#
# If `normalise` fails to evaluate, the attribute value is set to `""`.
# If not, the resulting value is normalised again and an appropriate attribute set added to the output list.
{
  # The path to the nixpkgs lib to use
  libpath,
  # A flat directory containing files with randomly-generated
  # path-like values
  dir,
}:
let
  lib = import libpath;

  # read each file into a string
  strings = map (name:
    builtins.readFile (dir + "/${name}")
  ) (builtins.attrNames (builtins.readDir dir));

  inherit (lib.path.subpath) normalise isValid;
  inherit (lib.asserts) assertMsg;

  normaliseAndCheck = str:
    let
      originalValid = isValid str;

      tryOnce = builtins.tryEval (normalise str);
      tryTwice = builtins.tryEval (normalise tryOnce.value);

      absConcatOrig = /. + ("/" + str);
      absConcatNormalised = /. + ("/" + tryOnce.value);
    in
    # Check the lib.path.subpath.normalise property to only error on invalid subpaths
    assert assertMsg
      (originalValid -> tryOnce.success)
      "Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
    assert assertMsg
      (! originalValid -> ! tryOnce.success)
      "Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";

    # Check normalisation idempotency
    assert assertMsg
      (originalValid -> tryTwice.success)
      "For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
    assert assertMsg
      (originalValid -> tryOnce.value == tryTwice.value)
      "For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";

    # Check that normalisation doesn't change a string when appended to an absolute Nix path value
    assert assertMsg
      (originalValid -> absConcatOrig == absConcatNormalised)
      "For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";

    # Return an empty string when failed
    if tryOnce.success then tryOnce.value else "";

in lib.genAttrs strings normaliseAndCheck
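For orientation, the attribute set returned by prop.nix has the shape described in its header comment: every input string maps to its normalisation, or to `""` when normalisation fails for an invalid subpath. A hypothetical result for three hand-picked inputs (the real inputs are random strings produced by generate.awk):

{
  "foo//bar" = "./foo/bar";   # valid subpath, normalised
  "/absolute" = "";           # starts with /, normalise throws, encoded as ""
  ".." = "";                  # `..` component, normalise throws, encoded as ""
}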
lib/path/tests/prop.sh (new executable file, 179 lines)

#!/usr/bin/env bash

# Property tests for the `lib.path` library
#
# It generates random path-like strings and runs the functions on
# them, checking that the expected laws of the functions hold

set -euo pipefail
shopt -s inherit_errexit

# https://stackoverflow.com/a/246128
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

if test -z "${TEST_LIB:-}"; then
  TEST_LIB=$SCRIPT_DIR/../..
fi

tmp="$(mktemp -d)"
clean_up() {
  rm -rf "$tmp"
}
trap clean_up EXIT
mkdir -p "$tmp/work"
cd "$tmp/work"

# Defaulting to a random seed but the first argument can override this
seed=${1:-$RANDOM}
echo >&2 "Using seed $seed, use \`lib/path/tests/prop.sh $seed\` to reproduce this result"

# The number of random paths to generate. This specific number was chosen to
# be fast enough while still generating enough variety to detect bugs.
count=500

debug=0
# debug=1 # print some extra info
# debug=2 # print generated values

# Fine tuning parameters to balance the number of generated invalid paths
# to the variance in generated paths.
extradotweight=64    # Larger value: more dots
extraslashweight=64  # Larger value: more slashes
extranullweight=16   # Larger value: shorter strings

die() {
  echo >&2 "test case failed: " "$@"
  exit 1
}

if [[ "$debug" -ge 1 ]]; then
  echo >&2 "Generating $count random path-like strings"
fi

# Read stream of null-terminated strings entry-by-entry into bash,
# write it to a file and the `strings` array.
declare -a strings=()
mkdir -p "$tmp/strings"
while IFS= read -r -d $'\0' str; do
  echo -n "$str" > "$tmp/strings/${#strings[@]}"
  strings+=("$str")
done < <(awk \
  -f "$SCRIPT_DIR"/generate.awk \
  -v seed="$seed" \
  -v count="$count" \
  -v extradotweight="$extradotweight" \
  -v extraslashweight="$extraslashweight" \
  -v extranullweight="$extranullweight")

if [[ "$debug" -ge 1 ]]; then
  echo >&2 "Trying to normalise the generated path-like strings with Nix"
fi

# Precalculate all normalisations with a single Nix call. Calling Nix for each
# string individually would take way too long
nix-instantiate --eval --strict --json \
  --argstr libpath "$TEST_LIB" \
  --argstr dir "$tmp/strings" \
  "$SCRIPT_DIR"/prop.nix \
  >"$tmp/result.json"

# Uses some jq magic to turn the resulting attribute set into an associative
# bash array assignment
declare -A normalised_result="($(jq '
  to_entries
  | map("[\(.key | @sh)]=\(.value | @sh)")
  | join(" \n")' -r < "$tmp/result.json"))"

# Looks up a normalisation result for a string
# Checks that the normalisation is only failing iff it's an invalid subpath
# For valid subpaths, returns 0 and prints the normalisation result
# For invalid subpaths, returns 1
normalise() {
  local str=$1
  # Uses the same check for validity as in the library implementation
  if [[ "$str" == "" || "$str" == /* || "$str" =~ ^(.*/)?\.\.(/.*)?$ ]]; then
    valid=
  else
    valid=1
  fi

  normalised=${normalised_result[$str]}
  # An empty string indicates failure, this is encoded in ./prop.nix
  if [[ -n "$normalised" ]]; then
    if [[ -n "$valid" ]]; then
      echo "$normalised"
    else
      die "For invalid subpath \"$str\", lib.path.subpath.normalise returned this result: \"$normalised\""
    fi
  else
    if [[ -n "$valid" ]]; then
      die "For valid subpath \"$str\", lib.path.subpath.normalise failed"
    else
      if [[ "$debug" -ge 2 ]]; then
        echo >&2 "String \"$str\" is not a valid subpath"
      fi
      # Invalid and it correctly failed, we let the caller continue if they catch the exit code
      return 1
    fi
  fi
}

# Intermediate result populated by test_idempotency_realpath
# and used in test_normalise_uniqueness
#
# Contains a mapping from a normalised subpath to the realpath result it represents
declare -A norm_to_real

test_idempotency_realpath() {
  if [[ "$debug" -ge 1 ]]; then
    echo >&2 "Checking idempotency of each result and making sure the realpath result isn't changed"
  fi

  # Count invalid subpaths to display stats
  invalid=0
  for str in "${strings[@]}"; do
    if ! result=$(normalise "$str"); then
      ((invalid++)) || true
      continue
    fi

    # Check the law that it doesn't change the result of a realpath
    mkdir -p -- "$str" "$result"
    real_orig=$(realpath -- "$str")
    real_norm=$(realpath -- "$result")

    if [[ "$real_orig" != "$real_norm" ]]; then
      die "realpath of the original string \"$str\" (\"$real_orig\") is not the same as realpath of the normalisation \"$result\" (\"$real_norm\")"
    fi

    if [[ "$debug" -ge 2 ]]; then
      echo >&2 "String \"$str\" gets normalised to \"$result\" and file path \"$real_orig\""
    fi
    norm_to_real["$result"]="$real_orig"
  done
  if [[ "$debug" -ge 1 ]]; then
    echo >&2 "$(bc <<< "scale=1; 100 / $count * $invalid")% of the total $count generated strings were invalid subpath strings, and were therefore ignored"
  fi
}

test_normalise_uniqueness() {
  if [[ "$debug" -ge 1 ]]; then
    echo >&2 "Checking for the uniqueness law"
  fi

  for norm_p in "${!norm_to_real[@]}"; do
    real_p=${norm_to_real["$norm_p"]}
    for norm_q in "${!norm_to_real[@]}"; do
      real_q=${norm_to_real["$norm_q"]}
      # Checks normalisation uniqueness law for each pair of values
      if [[ "$norm_p" != "$norm_q" && "$real_p" == "$real_q" ]]; then
        die "Normalisations \"$norm_p\" and \"$norm_q\" are different, but the realpath of them is the same: \"$real_p\""
      fi
    done
  done
}

test_idempotency_realpath
test_normalise_uniqueness

echo >&2 tests ok
lib/path/tests/unit.nix (new file, 125 lines)

# Unit tests for lib.path functions. Use `nix-build` in this directory to
# run these
{ libpath }:
let
  lib = import libpath;
  inherit (lib.path) subpath;

  cases = lib.runTests {
    testSubpathIsValidExample1 = {
      expr = subpath.isValid null;
      expected = false;
    };
    testSubpathIsValidExample2 = {
      expr = subpath.isValid "";
      expected = false;
    };
    testSubpathIsValidExample3 = {
      expr = subpath.isValid "/foo";
      expected = false;
    };
    testSubpathIsValidExample4 = {
      expr = subpath.isValid "../foo";
      expected = false;
    };
    testSubpathIsValidExample5 = {
      expr = subpath.isValid "foo/bar";
      expected = true;
    };
    testSubpathIsValidExample6 = {
      expr = subpath.isValid "./foo//bar/";
      expected = true;
    };
    testSubpathIsValidTwoDotsEnd = {
      expr = subpath.isValid "foo/..";
      expected = false;
    };
    testSubpathIsValidTwoDotsMiddle = {
      expr = subpath.isValid "foo/../bar";
      expected = false;
    };
    testSubpathIsValidTwoDotsPrefix = {
      expr = subpath.isValid "..foo";
      expected = true;
    };
    testSubpathIsValidTwoDotsSuffix = {
      expr = subpath.isValid "foo..";
      expected = true;
    };
    testSubpathIsValidTwoDotsPrefixComponent = {
      expr = subpath.isValid "foo/..bar/baz";
      expected = true;
    };
    testSubpathIsValidTwoDotsSuffixComponent = {
      expr = subpath.isValid "foo/bar../baz";
      expected = true;
    };
    testSubpathIsValidThreeDots = {
      expr = subpath.isValid "...";
      expected = true;
    };
    testSubpathIsValidFourDots = {
      expr = subpath.isValid "....";
      expected = true;
    };
    testSubpathIsValidThreeDotsComponent = {
      expr = subpath.isValid "foo/.../bar";
      expected = true;
    };
    testSubpathIsValidFourDotsComponent = {
      expr = subpath.isValid "foo/..../bar";
      expected = true;
    };

    testSubpathNormaliseExample1 = {
      expr = subpath.normalise "foo//bar";
      expected = "./foo/bar";
    };
    testSubpathNormaliseExample2 = {
      expr = subpath.normalise "foo/./bar";
      expected = "./foo/bar";
    };
    testSubpathNormaliseExample3 = {
      expr = subpath.normalise "foo/bar";
      expected = "./foo/bar";
    };
    testSubpathNormaliseExample4 = {
      expr = subpath.normalise "foo/bar/";
      expected = "./foo/bar";
    };
    testSubpathNormaliseExample5 = {
      expr = subpath.normalise "foo/bar/.";
      expected = "./foo/bar";
    };
    testSubpathNormaliseExample6 = {
      expr = subpath.normalise ".";
      expected = "./.";
    };
    testSubpathNormaliseExample7 = {
      expr = (builtins.tryEval (subpath.normalise "foo/../bar")).success;
      expected = false;
    };
    testSubpathNormaliseExample8 = {
      expr = (builtins.tryEval (subpath.normalise "")).success;
      expected = false;
    };
    testSubpathNormaliseExample9 = {
      expr = (builtins.tryEval (subpath.normalise "/foo")).success;
      expected = false;
    };
    testSubpathNormaliseIsValidDots = {
      expr = subpath.normalise "./foo/.bar/.../baz...qux";
      expected = "./foo/.bar/.../baz...qux";
    };
    testSubpathNormaliseWrongType = {
      expr = (builtins.tryEval (subpath.normalise null)).success;
      expected = false;
    };
    testSubpathNormaliseTwoDots = {
      expr = (builtins.tryEval (subpath.normalise "..")).success;
      expected = false;
    };
  };
in
  if cases == [] then "Unit tests successful"
  else throw "Path unit tests failed: ${lib.generators.toPretty {} cases}"
@@ -15,6 +15,9 @@ pkgs.runCommand "nixpkgs-lib-tests" {
 inherit pkgs;
 lib = import ../.;
 })
+(import ../path/tests {
+inherit pkgs;
+})
 ];
 } ''
 datadir="${pkgs.nix}/share"

@@ -358,6 +358,13 @@
 And backup your data.
 </para>
 </listitem>
+<listitem>
+<para>
+<literal>services.chronyd</literal> is now started with
+additional systemd sandbox/hardening options for better
+security.
+</para>
+</listitem>
 <listitem>
 <para>
 The module <literal>services.headscale</literal> was

@@ -98,6 +98,8 @@ In addition to numerous new and upgraded packages, this release has the followin

 And backup your data.

+- `services.chronyd` is now started with additional systemd sandbox/hardening options for better security.
+
 - The module `services.headscale` was refactored to be compliant with [RFC 0042](https://github.com/NixOS/rfcs/blob/master/rfcs/0042-config-option.md). To be precise, this means that the following things have changed:

 - Most settings has been migrated under [services.headscale.settings](#opt-services.headscale.settings) which is an attribute-set that

@@ -135,7 +135,7 @@ in
 # The SSH agent protocol doesn't have support for changing TTYs; however we
 # can simulate this with the `exec` feature of openssh (see ssh_config(5))
 # that hooks a command to the shell currently running the ssh program.
-Match host * exec "${cfg.package}/bin/gpg-connect-agent --quiet updatestartuptty /bye >/dev/null 2>&1"
+Match host * exec "${pkgs.runtimeShell} -c '${cfg.package}/bin/gpg-connect-agent --quiet updatestartuptty /bye >/dev/null 2>&1'"
 '';

 environment.extraInit = mkIf cfg.agent.enableSSHSupport ''
@@ -33,6 +33,31 @@
 "actions": {
 "update-props": {}
 }
+},
+{
+"matches": [
+{
+"application.process.binary": "jack_bufsize"
+}
+],
+"actions": {
+"update-props": {
+"jack.global-buffer-size": true
+}
+}
+},
+{
+"matches": [
+{
+"application.process.binary": "qsynth"
+}
+],
+"actions": {
+"update-props": {
+"node.pause-on-idle": false,
+"node.passive": true
+}
+}
 }
 ]
 }

@@ -32,10 +32,12 @@
 "args": {}
 }
 ],
-"context.exec": [
+"context.exec": [],
+"pulse.cmd": [
 {
-"path": "pactl",
-"args": "load-module module-always-sink"
+"cmd": "load-module",
+"args": "module-always-sink",
+"flags": []
 }
 ],
 "stream.properties": {},

@@ -89,13 +91,14 @@
 {
 "matches": [
 {
-"application.name": "~speech-dispatcher*"
+"application.name": "~speech-dispatcher.*"
 }
 ],
 "actions": {
 "update-props": {
-"pulse.min.req": "1024/48000",
-"pulse.min.quantum": "1024/48000"
+"pulse.min.req": "512/48000",
+"pulse.min.quantum": "512/48000",
+"pulse.idle.timeout": 5
 }
 }
 }

@@ -70,6 +70,14 @@
 },
 {
 "name": "libpipewire-module-session-manager"
+},
+{
+"name": "libpipewire-module-x11-bell",
+"args": {},
+"flags": [
+"ifexists",
+"nofail"
+]
 }
 ],
 "context.objects": [
@ -147,9 +147,9 @@ in
|
|||||||
systemd.services.systemd-timedated.environment = { SYSTEMD_TIMEDATED_NTP_SERVICES = "chronyd.service"; };
|
systemd.services.systemd-timedated.environment = { SYSTEMD_TIMEDATED_NTP_SERVICES = "chronyd.service"; };
|
||||||
|
|
||||||
systemd.tmpfiles.rules = [
|
systemd.tmpfiles.rules = [
|
||||||
"d ${stateDir} 0755 chrony chrony - -"
|
"d ${stateDir} 0750 chrony chrony - -"
|
||||||
"f ${driftFile} 0640 chrony chrony -"
|
"f ${driftFile} 0640 chrony chrony - -"
|
||||||
"f ${keyFile} 0640 chrony chrony -"
|
"f ${keyFile} 0640 chrony chrony - -"
|
||||||
];
|
];
|
||||||
|
|
||||||
systemd.services.chronyd =
|
systemd.services.chronyd =
|
||||||
@ -164,15 +164,47 @@ in
|
|||||||
path = [ chronyPkg ];
|
path = [ chronyPkg ];
|
||||||
|
|
||||||
unitConfig.ConditionCapability = "CAP_SYS_TIME";
|
unitConfig.ConditionCapability = "CAP_SYS_TIME";
|
||||||
serviceConfig =
|
serviceConfig = {
|
||||||
{ Type = "simple";
|
Type = "simple";
|
||||||
ExecStart = "${chronyPkg}/bin/chronyd ${builtins.toString chronyFlags}";
|
ExecStart = "${chronyPkg}/bin/chronyd ${builtins.toString chronyFlags}";
|
||||||
|
|
||||||
ProtectHome = "yes";
|
# Proc filesystem
|
||||||
|
ProcSubset = "pid";
|
||||||
|
ProtectProc = "invisible";
|
||||||
|
# Access write directories
|
||||||
|
ReadWritePaths = [ "${stateDir}" ];
|
||||||
|
UMask = "0027";
|
||||||
|
# Capabilities
|
||||||
|
CapabilityBoundingSet = [ "CAP_CHOWN" "CAP_DAC_OVERRIDE" "CAP_NET_BIND_SERVICE" "CAP_SETGID" "CAP_SETUID" "CAP_SYS_RESOURCE" "CAP_SYS_TIME" ];
|
||||||
|
# Device Access
|
||||||
|
DeviceAllow = [ "char-pps rw" "char-ptp rw" "char-rtc rw" ];
|
||||||
|
DevicePolicy = "closed";
|
||||||
|
# Security
|
||||||
|
NoNewPrivileges = true;
|
||||||
|
# Sandboxing
|
||||||
ProtectSystem = "full";
|
ProtectSystem = "full";
|
||||||
PrivateTmp = "yes";
|
ProtectHome = true;
|
||||||
|
PrivateTmp = true;
|
||||||
|
PrivateDevices = true;
|
||||||
|
PrivateUsers = false;
|
||||||
|
ProtectHostname = true;
|
||||||
|
ProtectClock = false;
|
||||||
|
ProtectKernelTunables = true;
|
||||||
|
ProtectKernelModules = true;
|
||||||
|
ProtectKernelLogs = true;
|
||||||
|
ProtectControlGroups = true;
|
||||||
|
RestrictAddressFamilies = [ "AF_UNIX" "AF_INET" "AF_INET6" ];
|
||||||
|
RestrictNamespaces = true;
|
||||||
|
LockPersonality = true;
|
||||||
|
MemoryDenyWriteExecute = true;
|
||||||
|
RestrictRealtime = true;
|
||||||
|
RestrictSUIDSGID = true;
|
||||||
|
RemoveIPC = true;
|
||||||
|
PrivateMounts = true;
|
||||||
|
# System Call Filtering
|
||||||
|
SystemCallArchitectures = "native";
|
||||||
|
SystemCallFilter = [ "~@cpu-emulation @debug @keyring @mount @obsolete @privileged @resources" "@clock" "@setuid" "capset" "chown" ];
|
||||||
};
|
};
|
||||||
|
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@@ -454,8 +454,9 @@ in {
 
 # A placeholder file for invalid barcodes
 invalid_barcode_location="${cfg.dataDir}/public/uploads/barcodes/invalid_barcode.gif"
- [ ! -e "$invalid_barcode_location" ] \
- && cp ${snipe-it}/share/snipe-it/invalid_barcode.gif "$invalid_barcode_location"
+ if [ ! -e "$invalid_barcode_location" ]; then
+ cp ${snipe-it}/share/snipe-it/invalid_barcode.gif "$invalid_barcode_location"
+ fi
 '';
 };
 
@@ -32,7 +32,7 @@ let
 inherit (lib)
 getBin optionalString literalExpression
 mkRemovedOptionModule mkRenamedOptionModule
- mkDefault mkIf mkMerge mkOption types;
+ mkDefault mkIf mkMerge mkOption mkPackageOption types;
 
 ini = pkgs.formats.ini { };
 
@@ -198,6 +198,11 @@ in
 example = literalExpression "[ pkgs.plasma5Packages.oxygen ]";
 };
 
+ notoPackage = mkPackageOption pkgs "Noto fonts" {
+ default = [ "noto-fonts" ];
+ example = "noto-fonts-lgc-plus";
+ };
+
 # Internally allows configuring kdeglobals globally
 kdeglobals = mkOption {
 internal = true;
@@ -401,7 +406,7 @@ in
 # Enable GTK applications to load SVG icons
 services.xserver.gdk-pixbuf.modulePackages = [ pkgs.librsvg ];
 
- fonts.fonts = with pkgs; [ noto-fonts hack-font ];
+ fonts.fonts = with pkgs; [ cfg.notoPackage hack-font ];
 fonts.fontconfig.defaultFonts = {
 monospace = [ "Hack" "Noto Sans Mono" ];
 sansSerif = [ "Noto Sans" ];
@@ -545,7 +550,7 @@ in
 }
 {
 # The user interface breaks without pulse
- assertion = config.hardware.pulseaudio.enable;
+ assertion = config.hardware.pulseaudio.enable || (config.services.pipewire.enable && config.services.pipewire.pulse.enable);
 message = "Plasma Mobile requires pulseaudio.";
 }
 ];
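Illustrative only: a minimal sketch of how the new notoPackage option could be set from a NixOS configuration, assuming the hunks above belong to the Plasma 5 desktop module (the full option path is an assumption, not shown in this diff):

{ pkgs, ... }:
{
  # Assumed option path; the diff only shows the module body.
  services.xserver.desktopManager.plasma5.notoPackage = pkgs.noto-fonts-lgc-plus;
}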
@@ -31,7 +31,6 @@ in
 type = types.package;
 default = pkgs.i3;
 defaultText = literalExpression "pkgs.i3";
- example = literalExpression "pkgs.i3-gaps";
 description = lib.mdDoc ''
 i3 package to use.
 '';
@@ -73,6 +72,6 @@ in
 
 imports = [
 (mkRemovedOptionModule [ "services" "xserver" "windowManager" "i3-gaps" "enable" ]
- "Use services.xserver.windowManager.i3.enable and set services.xserver.windowManager.i3.package to pkgs.i3-gaps to use i3-gaps.")
+ "i3-gaps was merged into i3. Use services.xserver.windowManager.i3.enable instead.")
 ];
 }
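Illustrative only: a minimal migration sketch for configurations that previously enabled the removed i3-gaps option, following the option name given in the new removal message above:

{ pkgs, ... }:
{
  # i3-gaps was merged into i3, so enabling the plain i3 module is enough.
  services.xserver.windowManager.i3.enable = true;
}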
@@ -56,12 +56,8 @@
 
 wantedBy = [ "multi-user.target" ];
 
- unitConfig = {
- ConditionPathExists = "/var/lib/waydroid/lxc/waydroid";
- };
-
 serviceConfig = {
- ExecStart = "${pkgs.waydroid}/bin/waydroid container start";
+ ExecStart = "${pkgs.waydroid}/bin/waydroid -w container start";
 ExecStop = "${pkgs.waydroid}/bin/waydroid container stop";
 ExecStopPost = "${pkgs.waydroid}/bin/waydroid session stop";
 };
@@ -9,16 +9,16 @@
 
 rustPackages.rustPlatform.buildRustPackage rec {
 pname = "spotifyd";
- version = "0.3.3";
+ version = "0.3.4";
 
 src = fetchFromGitHub {
 owner = "Spotifyd";
 repo = "spotifyd";
 rev = "v${version}";
- sha256 = "1liql2wp7cx0x4ha1578wx3m4byd295m4ph268s05yw2wrnr3v6c";
+ sha256 = "sha256-9zwHBDrdvE2R/cdrWgjsfHlm3wEZ9SB2VNcqezB/Op0=";
 };
 
- cargoSha256 = "1plvqd55d1gj0ydimv3154pwgj2sh1fqx2182nw8akzdfmzg1150";
+ cargoSha256 = "sha256-fQm7imXpm5AcKdg0cU/Rf2mAeg2ebZKRisJZSnG0REI=";
 
 nativeBuildInputs = [ pkg-config ];
 
@@ -31,11 +31,11 @@
 
 mkDerivation rec {
 pname = "saga";
- version = "8.4.0";
+ version = "8.5.0";
 
 src = fetchurl {
 url = "mirror://sourceforge/saga-gis/SAGA%20-%20${lib.versions.major version}/SAGA%20-%20${version}/saga-${version}.tar.gz";
- sha256 = "sha256-v6DPwV20fcsznrEaFJk0/ewU4z3cTjzYYuLkyMwSLV0=";
+ sha256 = "sha256-JzSuu1wGfCkxIDcTbP5jpHtJNvl8eAP3jznXvwSPeY0=";
 };
 
 sourceRoot = "saga-${version}/saga-gis";
@@ -15,19 +15,19 @@
 
 stdenv.mkDerivation rec {
 pname = "eyedropper";
- version = "0.4.0";
+ version = "0.5.0";
 
 src = fetchFromGitHub {
 owner = "FineFindus";
 repo = pname;
- rev = version;
- hash = "sha256-bOpwHaFOoUlh+yyC1go6BeFxfJhUmwZPi6kYAqCagEI=";
+ rev = "v${version}";
+ hash = "sha256-sDrMIryVFkjMGHbYvNDmKb1HyJNGb3Hd+muxUJKhogE=";
 };
 
 cargoDeps = rustPlatform.fetchCargoTarball {
 inherit src;
 name = "${pname}-${version}";
- hash = "sha256-TkdOq+icU2zNbXzN6nbkXjL1o/Lfumqr/5S0pQaxY5Q=";
+ hash = "sha256-mztc44hHdqzR3WbG6tkCL38EfgBajRLlpMC8ElpXnlo=";
 };
 
 nativeBuildInputs = [
@@ -5,7 +5,7 @@
 
 python3Packages.buildPythonApplication rec {
 pname = "flexget";
- version = "3.5.13";
+ version = "3.5.16";
 format = "pyproject";
 
 # Fetch from GitHub in order to use `requirements.in`
@@ -13,7 +13,7 @@ python3Packages.buildPythonApplication rec {
 owner = "flexget";
 repo = "flexget";
 rev = "refs/tags/v${version}";
- hash = "sha256-0yO4prnYJkD7eiyrEOPHlDTsgGgRhQujsp8k2FsLYKI=";
+ hash = "sha256-9hcl7OZLi86hZHLotsN1QlPzQ1Ep5vJumAyZxSxxIE8=";
 };
 
 postPatch = ''
@@ -4,16 +4,16 @@ let
 common = { stname, target, postInstall ? "" }:
 buildGoModule rec {
 pname = stname;
- version = "1.22.2";
+ version = "1.23.0";
 
 src = fetchFromGitHub {
 owner = "syncthing";
 repo = "syncthing";
 rev = "v${version}";
- hash = "sha256-t1JIkUjSEshSm3Zi5Ck8IOmTv2tC0dUYyJvlKua/BcI=";
+ hash = "sha256-Z4YVU45na4BgIbN/IlORpTCuf2EuSuOyppDRzswn3EI=";
 };
 
- vendorSha256 = "sha256-UdzWD8I8ulPBXdF5wZQ7hQoVO9Bnj18Gw5t4wqolSPA=";
+ vendorHash = "sha256-q63iaRxJRvPY0Np20O6JmdMEjSg/kxRneBfs8fRTwXk=";
 
 doCheck = false;
 
@@ -41,6 +41,13 @@ stdenv.mkDerivation rec {
 sha256 = "sha256-vwVQnY9EUCXPzhDJ4PSOmQStb9eF6H0yAOiEmL6sAlk=";
 excludes = [ "doc/NEWS.md" ];
 })
+
+ # Fix included bug with boost >= 1.76. Remove with the next release
+ (fetchpatch {
+ url = "https://github.com/ledger/ledger/commit/1cb9b84fdecc5604bd1172cdd781859ff3871a52.patch";
+ sha256 = "sha256-ipVkRcTmnEvpfyPgMzLVJ9Sz8QxHeCURQI5dX8xh758=";
+ excludes = [ "test/regress/*" ];
+ })
 ];
 
 installTargets = [ "doc" "install" ];
@@ -8,11 +8,11 @@
 
 stdenv.mkDerivation rec {
 pname = "dataexplorer";
- version = "3.7.3";
+ version = "3.7.4";
 
 src = fetchurl {
 url = "mirror://savannah/dataexplorer/dataexplorer-${version}-src.tar.gz";
- sha256 = "sha256-cqvlPV4i9m0x3hbruC5y2APsyjfI5y9RT8XVzsDaT/Q=";
+ sha256 = "sha256-bghI7Hun7ZKUVEj7T58K0oaclnhUGd4z+eIqZF3eXHQ=";
 };
 
 nativeBuildInputs = [ ant makeWrapper ];
pkgs/applications/version-management/deepgit/default.nix (new file, 86 lines)
@@ -0,0 +1,86 @@
{ copyDesktopItems
, fetchurl
, glib
, gnome
, gtk3
, jre
, lib
, makeDesktopItem
, stdenv
, wrapGAppsHook
}:

stdenv.mkDerivation rec {
  pname = "deepgit";
  version = "4.3";

  src = fetchurl {
    url = "https://www.syntevo.com/downloads/deepgit/deepgit-linux-${lib.replaceStrings [ "." ] [ "_" ] version}.tar.gz";
    hash = "sha256-bA/EySZjuSDYaZplwHcpeP1VakcnG5K1hYTk7cSVbz0=";
  };

  nativeBuildInputs = [
    copyDesktopItems
    wrapGAppsHook
  ];

  buildInputs = [
    gnome.adwaita-icon-theme
    gtk3
    jre
  ];

  preFixup = ''
    gappsWrapperArgs+=(
      --prefix LD_LIBRARY_PATH : ${lib.makeLibraryPath [ glib gtk3 ]}
      --set DEEPGIT_JAVA_HOME ${jre}
    )
    patchShebangs bin/deepgit.sh
  '';

  desktopItems = [(makeDesktopItem rec {
    name = pname;
    desktopName = "DeepGit";
    keywords = [ "git" ];
    comment = "Git-Client";
    categories = [
      "Development"
      "RevisionControl"
    ];
    terminal = false;
    startupNotify = true;
    startupWMClass = desktopName;
    exec = pname;
    mimeTypes = [
      "x-scheme-handler/${pname}"
      "x-scheme-handler/sourcetree"
    ];
    icon = pname;
  })];

  installPhase = ''
    runHook preInstall

    mkdir -pv $out/{bin,share/icons/hicolor/scalable/apps/}
    cp -a lib license.html $out
    mv bin/deepgit.sh $out/bin/deepgit

    for icon_size in 32 48 64 128 256; do
      path=$icon_size'x'$icon_size
      icon=bin/deepgit-$icon_size.png
      mkdir -p $out/share/icons/hicolor/$path/apps
      cp $icon $out/share/icons/hicolor/$path/apps/deepgit.png
    done

    runHook postInstall
  '';

  meta = with lib; {
    description = "A tool to investigate the history of source code";
    homepage = "https://www.syntevo.com/deepgit";
    changelog = "https://www.syntevo.com/deepgit/changelog.txt";
    license = licenses.unfree;
    maintainers = with maintainers; [ urandom ];
    platforms = platforms.linux;
  };
}
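Illustrative only: a minimal sketch for installing the new deepgit package on NixOS. Since the derivation is marked licenses.unfree, unfree packages must be allowed; the exact host configuration shown here is an assumption:

{ pkgs, ... }:
{
  # DeepGit is unfree, so nixpkgs must be told to allow it.
  nixpkgs.config.allowUnfree = true;
  environment.systemPackages = [ pkgs.deepgit ];
}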
@@ -7,11 +7,11 @@
 
 stdenv.mkDerivation rec {
 pname = "i3";
- version = "4.21.1";
+ version = "4.22";
 
 src = fetchurl {
 url = "https://i3wm.org/downloads/${pname}-${version}.tar.xz";
- sha256 = "sha256-7f14EoXGVKBdxtsnLOAwDEQo5vvYddmZZOV94ltBvB4=";
+ sha256 = "sha256-KGOZEeWdlWOfCSZCqYL14d6lkiUMK1zpjtoQCDNRPks=";
 };
 
 nativeBuildInputs = [
@@ -1,29 +0,0 @@
{ fetchFromGitHub, lib, i3 }:

i3.overrideAttrs (oldAttrs : rec {
  pname = "i3-gaps";
  version = "4.21.1";

  src = fetchFromGitHub {
    owner = "Airblader";
    repo = "i3";
    rev = version;
    sha256 = "sha256-+JxJjvzEuAA4CH+gufzAzIqd5BSvHtPvLm2zTfXc/xk=";
  };

  meta = with lib; {
    description = "A fork of the i3 tiling window manager with some additional features";
    homepage = "https://github.com/Airblader/i3";
    maintainers = with maintainers; [ fmthoma ];
    license = licenses.bsd3;
    platforms = platforms.linux ++ platforms.netbsd ++ platforms.openbsd;

    longDescription = ''
      Fork of i3wm, a tiling window manager primarily targeted at advanced users
      and developers. Based on a tree as data structure, supports tiling,
      stacking, and tabbing layouts, handled dynamically, as well as floating
      windows. This fork adds a few features such as gaps between windows.
      Configured via plain text file. Multi-monitor. UTF-8 clean.
    '';
  };
})
@@ -11,39 +11,10 @@
 , imagemagick
 , zopfli
 , buildPackages
+ , variants ? [ ]
 }:
 
 let
- mkNoto = { pname, weights }:
- stdenvNoCC.mkDerivation {
- inherit pname;
- version = "2020-01-23";
-
- src = fetchFromGitHub {
- owner = "googlefonts";
- repo = "noto-fonts";
- rev = "f4726a2ec36169abd02a6d8abe67c8ff0236f6d8";
- sha256 = "0zc1r7zph62qmvzxqfflsprazjf6x1qnwc2ma27kyzh6v36gaykw";
- };
-
- installPhase = ''
- # We copy in reverse preference order -- unhinted first, then
- # hinted -- to get the "best" version of each font while
- # maintaining maximum coverage.
- #
- # TODO: install OpenType, variable versions?
- local out_ttf=$out/share/fonts/truetype/noto
- install -m444 -Dt $out_ttf phaseIII_only/unhinted/ttf/*/*-${weights}.ttf
- install -m444 -Dt $out_ttf phaseIII_only/hinted/ttf/*/*-${weights}.ttf
- install -m444 -Dt $out_ttf unhinted/*/*-${weights}.ttf
- install -m444 -Dt $out_ttf hinted/*/*-${weights}.ttf
- '';
-
- meta = with lib; {
- description = "Beautiful and free fonts for many languages";
- homepage = "https://www.google.com/get/noto/";
- longDescription =
- ''
+ notoLongDescription = ''
 When text is rendered by a computer, sometimes characters are
 displayed as “tofu”. They are little boxes to indicate your device
 doesn’t have a font to display the text.
@@ -56,6 +27,52 @@ let
 
 This package also includes the Arimo, Cousine, and Tinos fonts.
 '';
+ in
+ rec {
+ mkNoto =
+ { pname
+ , weights
+ , variants ? [ ]
+ , longDescription ? notoLongDescription
+ }:
+ stdenvNoCC.mkDerivation rec {
+ inherit pname;
+ version = "20201206-phase3";
+
+ src = fetchFromGitHub {
+ owner = "googlefonts";
+ repo = "noto-fonts";
+ rev = "v${version}";
+ hash = "sha256-x60RvCRFLoGe0CNvswROnDkIsUFbWH+/laN8q2qkUPk=";
+ };
+
+ _variants = map (variant: builtins.replaceStrings [ " " ] [ "" ] variant) variants;
+
+ installPhase = ''
+ # We copy in reverse preference order -- unhinted first, then
+ # hinted -- to get the "best" version of each font while
+ # maintaining maximum coverage.
+ #
+ # TODO: install OpenType, variable versions?
+ local out_ttf=$out/share/fonts/truetype/noto
+ '' + (if _variants == [ ] then ''
+ install -m444 -Dt $out_ttf archive/unhinted/*/*-${weights}.ttf
+ install -m444 -Dt $out_ttf archive/hinted/*/*-${weights}.ttf
+ install -m444 -Dt $out_ttf unhinted/*/*/*-${weights}.ttf
+ install -m444 -Dt $out_ttf hinted/*/*/*-${weights}.ttf
+ '' else ''
+ for variant in $_variants; do
+ install -m444 -Dt $out_ttf archive/unhinted/$variant/*-${weights}.ttf
+ install -m444 -Dt $out_ttf archive/hinted/$variant/*-${weights}.ttf
+ install -m444 -Dt $out_ttf unhinted/*/$variant/*-${weights}.ttf
+ install -m444 -Dt $out_ttf hinted/*/$variant/*-${weights}.ttf
+ done
+ '');
+
+ meta = with lib; {
+ description = "Beautiful and free fonts for many languages";
+ homepage = "https://www.google.com/get/noto/";
+ inherit longDescription;
 license = licenses.ofl;
 platforms = platforms.all;
 maintainers = with maintainers; [ mathnerd314 emily ];
@@ -100,14 +117,34 @@ let
 maintainers = with maintainers; [ mathnerd314 emily ];
 };
 };
- in
-
- {
 noto-fonts = mkNoto {
 pname = "noto-fonts";
 weights = "{Regular,Bold,Light,Italic,BoldItalic,LightItalic}";
 };
 
+ noto-fonts-lgc-plus = mkNoto {
+ pname = "noto-fonts-lgc-plus";
+ weights = "{Regular,Bold,Light,Italic,BoldItalic,LightItalic}";
+ variants = [
+ "Noto Sans"
+ "Noto Serif"
+ "Noto Sans Display"
+ "Noto Serif Display"
+ "Noto Sans Mono"
+ "Noto Music"
+ "Noto Sans Symbols"
+ "Noto Sans Symbols 2"
+ "Noto Sans Math"
+ ];
+ longDescription = ''
+ This package provides the Noto Fonts, but only for latin, greek
+ and cyrillic scripts, as well as some extra fonts. To create a
+ custom Noto package with custom variants, see the `mkNoto`
+ helper function.
+ '';
+ };
+
 noto-fonts-extra = mkNoto {
 pname = "noto-fonts-extra";
 weights = "{Black,Condensed,Extra,Medium,Semi,Thin}*";
@@ -127,11 +164,13 @@ in
 sha256 = "sha256-1w66Ge7DZjbONGhxSz69uFhfsjMsDiDkrGl6NsoB7dY=";
 };
 
- noto-fonts-emoji = let
+ noto-fonts-emoji =
+ let
 version = "2.038";
 emojiPythonEnv =
 buildPackages.python3.withPackages (p: with p; [ fonttools nototools ]);
- in stdenvNoCC.mkDerivation {
+ in
+ stdenvNoCC.mkDerivation {
 pname = "noto-fonts-emoji";
 inherit version;
 
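Illustrative only: a minimal sketch of calling the new mkNoto helper with custom variants, as the noto-fonts-lgc-plus longDescription above suggests; the pname and the chosen weights/variants below are hypothetical examples, not part of this commit:

pkgs.mkNoto {
  pname = "noto-fonts-custom-example";  # hypothetical package name
  weights = "{Regular,Bold}";
  variants = [ "Noto Sans" "Noto Serif" ];
}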
|
52
pkgs/development/compilers/gcc-arm-embedded/12/default.nix
Normal file
52
pkgs/development/compilers/gcc-arm-embedded/12/default.nix
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
{ lib
|
||||||
|
, stdenv
|
||||||
|
, fetchurl
|
||||||
|
, ncurses5
|
||||||
|
, python38
|
||||||
|
}:
|
||||||
|
|
||||||
|
stdenv.mkDerivation rec {
|
||||||
|
pname = "gcc-arm-embedded";
|
||||||
|
version = "12.2.rel1";
|
||||||
|
|
||||||
|
platform = {
|
||||||
|
aarch64-linux = "aarch64";
|
||||||
|
x86_64-darwin = "darwin-x86_64";
|
||||||
|
x86_64-linux = "x86_64";
|
||||||
|
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");
|
||||||
|
|
||||||
|
src = fetchurl {
|
||||||
|
url = "https://developer.arm.com/-/media/Files/downloads/gnu/${version}/binrel/arm-gnu-toolchain-${version}-${platform}-arm-none-eabi.tar.xz";
|
||||||
|
sha256 = {
|
||||||
|
aarch64-linux = "131ydgndff7dyhkivfchbk43lv3cv2p172knkqilx64aapvk5qvy";
|
||||||
|
x86_64-darwin = "00i9gd1ny00681pwinh6ng9x45xsyrnwc6hm2vr348z9gasyxh00";
|
||||||
|
x86_64-linux = "0rv8r5zh0a5621v0xygxi8f6932qgwinw2s9vnniasp9z7897gl4";
|
||||||
|
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");
|
||||||
|
};
|
||||||
|
|
||||||
|
dontConfigure = true;
|
||||||
|
dontBuild = true;
|
||||||
|
dontPatchELF = true;
|
||||||
|
dontStrip = true;
|
||||||
|
|
||||||
|
installPhase = ''
|
||||||
|
mkdir -p $out
|
||||||
|
cp -r * $out
|
||||||
|
'';
|
||||||
|
|
||||||
|
preFixup = ''
|
||||||
|
find $out -type f | while read f; do
|
||||||
|
patchelf "$f" > /dev/null 2>&1 || continue
|
||||||
|
patchelf --set-interpreter $(cat ${stdenv.cc}/nix-support/dynamic-linker) "$f" || true
|
||||||
|
patchelf --set-rpath ${lib.makeLibraryPath [ "$out" stdenv.cc.cc ncurses5 python38 ]} "$f" || true
|
||||||
|
done
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta = with lib; {
|
||||||
|
description = "Pre-built GNU toolchain from ARM Cortex-M & Cortex-R processors";
|
||||||
|
homepage = "https://developer.arm.com/open-source/gnu-toolchain/gnu-rm";
|
||||||
|
license = with licenses; [ bsd2 gpl2 gpl3 lgpl21 lgpl3 mit ];
|
||||||
|
maintainers = with maintainers; [ prusnak ];
|
||||||
|
platforms = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" ];
|
||||||
|
};
|
||||||
|
}
|
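Illustrative only: a minimal shell sketch for trying the new GCC 12 ARM toolchain; gcc-arm-embedded-12 also becomes the default gcc-arm-embedded later in this commit:

{ pkgs ? import <nixpkgs> { } }:
pkgs.mkShell {
  # arm-none-eabi-gcc and related tools come from the pre-built ARM toolchain.
  packages = [ pkgs.gcc-arm-embedded-12 ];
}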
@@ -2,7 +2,7 @@
 , cmake, which, m4, python3, bison, flex, llvmPackages, ncurses
 
 # the default test target is sse4, but that is not supported by all Hydra agents
- , testedTargets ? [ "sse2-i32x4" ]
+ , testedTargets ? if stdenv.isAarch64 || stdenv.isAarch32 then [ "neon-i32x4" ] else [ "sse2-i32x4" ]
 }:
 
 stdenv.mkDerivation rec {
@@ -58,14 +58,15 @@ stdenv.mkDerivation rec {
 "-DCLANGPP_EXECUTABLE=${llvmPackages.clang}/bin/clang++"
 "-DISPC_INCLUDE_EXAMPLES=OFF"
 "-DISPC_INCLUDE_UTILS=OFF"
- "-DARM_ENABLED=FALSE"
+ ("-DARM_ENABLED=" + (if stdenv.isAarch64 || stdenv.isAarch32 then "TRUE" else "FALSE"))
+ ("-DX86_ENABLED=" + (if stdenv.isx86_64 || stdenv.isx86_32 then "TRUE" else "FALSE"))
 ];
 
 meta = with lib; {
 homepage = "https://ispc.github.io/";
 description = "Intel 'Single Program, Multiple Data' Compiler, a vectorised language";
 license = licenses.bsd3;
- platforms = [ "x86_64-linux" "x86_64-darwin" ]; # TODO: buildable on more platforms?
+ platforms = [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]; # TODO: buildable on more platforms?
 maintainers = with maintainers; [ aristid thoughtpolice athas ];
 };
 }
@@ -54,7 +54,7 @@ stdenv.mkDerivation rec {
 # https://github.com/JuliaCI/julia-buildbot/blob/master/master/inventory.py
 "JULIA_CPU_TARGET=generic;sandybridge,-xsaveopt,clone_all;haswell,-rdrnd,base(1)"
 ] ++ lib.optionals stdenv.isAarch64 [
- "JULIA_CPU_TERGET=generic;cortex-a57;thunderx2t99;armv8.2-a,crypto,fullfp16,lse,rdm"
+ "JULIA_CPU_TARGET=generic;cortex-a57;thunderx2t99;armv8.2-a,crypto,fullfp16,lse,rdm"
 ];
 
 # remove forbidden reference to $TMPDIR
@@ -260,7 +260,7 @@ let
 inherit lib stdenv makeWrapper buildRubyGem buildEnv;
 gemConfig = defaultGemConfig;
 ruby = self;
- }) withPackages gems;
+ }) withPackages buildGems gems;
 
 } // lib.optionalAttrs useBaseRuby {
 inherit baseRuby;
@@ -5,7 +5,6 @@
 , gnatcoll-core
 , gprbuild
 , python3
- , why3
 , ocaml
 , ocamlPackages
 , makeWrapper
@@ -53,11 +52,9 @@ stdenv.mkDerivation rec {
 make setup
 '';
 
- postInstall = ''
+ installPhase = ''
+ make install-all
 cp -a ./install/. $out
- # help gnatprove to locate why3server
- wrapProgram "$out/bin/gnatprove" \
- --prefix PATH : "${why3}/lib/why3"
 '';
 
 meta = with lib; {
@@ -13,10 +13,10 @@ Fix build issues on Darwin.
 define(`confLDOPTS', `${Extra_LD_Flags}')
 --- a/sendmail/sendmail.h 2020-05-18 14:51:17.000000000 +0200
 +++ b/sendmail/sendmail.h 2020-05-18 14:51:00.000000000 +0200
- @@ -104,7 +104,11 @@
+ @@ -122,7 +122,11 @@
- # endif /* NETX25 */
+ # endif
 
- # if NAMED_BIND
+ #if NAMED_BIND
 -# include <arpa/nameser.h>
 +# ifdef __APPLE__
 +# include <arpa/nameser_compat.h>
@@ -25,4 +25,4 @@ Fix build issues on Darwin.
 +# endif
 # ifdef NOERROR
 # undef NOERROR /* avoid <sys/streams.h> conflict */
- # endif /* NOERROR */
+ # endif
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
 pname = "libmilter";
- version = "8.15.2";
+ version = "8.17.1";
 
 src = fetchurl {
 url = "ftp://ftp.sendmail.org/pub/sendmail/sendmail.${version}.tar.gz";
- sha256 = "0fdl9ndmspqspdlmghzxlaqk56j3yajk52d7jxcg21b7sxglpy94";
+ sha256 = "sha256-BLx2tsiG5tERvn/Y2qMrjOABKKKItrUuBnvCnzhUpuY=";
 };
 
 buildPhase = ''
@@ -32,7 +32,7 @@ stdenv.mkDerivation rec {
 sh Build -f ./a.m4
 '';
 
- patches = [ ./install.patch ./sharedlib.patch ./glibc-2.30.patch ./darwin.patch ];
+ patches = [ ./install.patch ./sharedlib.patch ./darwin.patch ];
 
 nativeBuildInputs = [ m4 ] ++ lib.optional stdenv.isDarwin fixDarwinDylibNames;
 
@@ -1,44 +0,0 @@
diff --git a/libmilter/sm_gethost.c b/libmilter/sm_gethost.c
index 2423c34..f00468c 100644
--- a/libmilter/sm_gethost.c
+++ b/libmilter/sm_gethost.c
@@ -52,16 +52,8 @@ sm_getipnodebyname(name, family, flags, err)
 bool resv6 = true;
 struct hostent *h;
 
- if (family == AF_INET6)
- {
- /* From RFC2133, section 6.1 */
- resv6 = bitset(RES_USE_INET6, _res.options);
- _res.options |= RES_USE_INET6;
- }
 SM_SET_H_ERRNO(0);
- h = gethostbyname(name);
- if (family == AF_INET6 && !resv6)
- _res.options &= ~RES_USE_INET6;
+ h = gethostbyname2(name, family);
 
 /* the function is supposed to return only the requested family */
 if (h != NULL && h->h_addrtype != family)
diff --git a/sendmail/conf.c b/sendmail/conf.c
index c73334e..500dafb 100644
--- a/sendmail/conf.c
+++ b/sendmail/conf.c
@@ -4243,16 +4243,8 @@ sm_getipnodebyname(name, family, flags, err)
 # else /* HAS_GETHOSTBYNAME2 */
 bool resv6 = true;
 
- if (family == AF_INET6)
- {
- /* From RFC2133, section 6.1 */
- resv6 = bitset(RES_USE_INET6, _res.options);
- _res.options |= RES_USE_INET6;
- }
 SM_SET_H_ERRNO(0);
- h = gethostbyname(name);
- if (!resv6)
- _res.options &= ~RES_USE_INET6;
+ h = gethostbyname2(name, family);
 
 /* the function is supposed to return only the requested family */
 if (h != NULL && h->h_addrtype != family)
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
 pname = "libpg_query";
- version = "14-3.0.0";
+ version = "15-4.0.0";
 
 src = fetchFromGitHub {
 owner = "pganalyze";
 repo = "libpg_query";
 rev = version;
- sha256 = "sha256-rICN8fkPcYw32N6TdpbrszGUoRzwQdfRSW6A0AC8toM=";
+ sha256 = "sha256-2BZT/jGfGwia+Map5OkeTcWVFJssykhrdRT2IDAzrfs=";
 };
 
 nativeBuildInputs = [ which ];
@@ -22,15 +22,16 @@ in
 
 buildPythonPackage rec {
 pname = "python-arango";
- version = "7.5.3";
- disabled = pythonOlder "3.7";
+ version = "7.5.4";
 format = "setuptools";
 
+ disabled = pythonOlder "3.7";
+
 src = fetchFromGitHub {
 owner = "ArangoDB-Community";
 repo = "python-arango";
- rev = version;
- sha256 = "0qb2yp05z8dmgsyyxqrl3q0a60jaiih96zhxmqrn2yf7as45n07j";
+ rev = "refs/tags/${version}";
+ hash = "sha256-b3UZuH2hpulRSThReBkDwh0MLJmc95HeWInmmMAl4g0=";
 };
 
 propagatedBuildInputs = [
@@ -127,12 +128,15 @@ buildPythonPackage rec {
 "test_replication_applier"
 ];
 
- pythonImportsCheck = [ "arango" ];
+ pythonImportsCheck = [
+ "arango"
+ ];
 
 meta = with lib; {
 description = "Python Driver for ArangoDB";
 homepage = "https://github.com/ArangoDB-Community/python-arango";
+ changelog = "https://github.com/ArangoDB-Community/python-arango/releases/tag/${version}";
 license = licenses.mit;
- maintainers = [ maintainers.jsoo1 ];
+ maintainers = with maintainers; [ jsoo1 ];
 };
 }
@@ -4,8 +4,8 @@ buildRubyGem rec {
 inherit ruby;
 name = "${gemName}-${version}";
 gemName = "bundler";
- version = "2.3.26";
- source.sha256 = "sha256-HuU832HnKK2Cxtv/Bs/NhVHVQi6I6GID8OLb6a6Zngk=";
+ version = "2.4.2";
+ source.sha256 = "sha256-mYUOxAWH7hv7Kn521OVI8PyzoO3T6LGPJjAxA07buR8=";
 dontPatchShebangs = true;
 
 passthru.updateScript = writeScript "gem-update-script" ''
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
 pname = "esbuild";
- version = "0.16.7";
+ version = "0.16.13";
 
 src = fetchFromGitHub {
 owner = "evanw";
 repo = "esbuild";
 rev = "v${version}";
- sha256 = "sha256-zo7YQ4Is3VWsXGvPNrg95tZ76qTSQRyntFjDeqhoyVw=";
+ hash = "sha256-X4UB2RDfupUP+u+4g2jLxbpx4n4uarhcjs5VtP9Zi20=";
 };
 
- vendorSha256 = "sha256-+BfxCyg0KkDQpHt/wycy/8CTG6YBA/VJvJFhhzUnSiQ=";
+ vendorHash = "sha256-+BfxCyg0KkDQpHt/wycy/8CTG6YBA/VJvJFhhzUnSiQ=";
 
 subPackages = [ "cmd/esbuild" ];
 
@@ -8,16 +8,16 @@
 
 rustPlatform.buildRustPackage rec {
 pname = "ptags";
- version = "0.3.2";
+ version = "0.3.4";
 
 src = fetchFromGitHub {
 owner = "dalance";
 repo = "ptags";
 rev = "v${version}";
- sha256 = "1xr1szh4dfrcmi6s6dj791k1ix2zbv75rqkqbyb1lmh5548kywkg";
+ sha256 = "sha256-hFHzNdTX3nw2OwRxk9lKrt/YpaBXwi5aE/Qn3W9PRf4=";
 };
 
- cargoSha256 = "1pz5hvn1iq26i8c2cmqavhnri8h0sn40khrxvcdkj9q47nsj5wcx";
+ cargoSha256 = "sha256-cFezB7uwUznC/8NXJNrBqP0lf0sXAQBoGksXFOGrUIg=";
 
 nativeBuildInputs = [ makeWrapper ];
 
@@ -6,11 +6,11 @@ else
 
 stdenv.mkDerivation rec {
 pname = "dune";
- version = "3.6.1";
+ version = "3.6.2";
 
 src = fetchurl {
 url = "https://github.com/ocaml/dune/releases/download/${version}/dune-${version}.tbz";
- sha256 = "sha256-8dWsBLegJ/PVSeJc+IXr96zBNeApHBjmtDEjp5nBQ84=";
+ sha256 = "sha256-ttSrhI77BKoqMl0AFdMu1EFO1xMOx6oS+YFY7/RFzzw=";
 };
 
 nativeBuildInputs = [ ocaml findlib ];
@@ -3,13 +3,13 @@
 
 buildPythonApplication rec {
 pname = "rare";
- version = "1.9.3";
+ version = "1.9.4";
 
 src = fetchFromGitHub {
 owner = "Dummerle";
 repo = "Rare";
- rev = version;
- sha256 = "sha256-M+OMsyamh4WHIx7Pv2sLylOrnSmYrv1aEm3atqXrDaw=";
+ rev = "refs/tags/${version}";
+ sha256 = "sha256-+STwVsDdvjP7HaqmaQVug+6h0n0rw/j4LGQQSNdLVQQ=";
 };
 
 nativeBuildInputs = [
@@ -2,6 +2,7 @@
 , lib
 , fetchFromGitHub
 , python3Packages
+ , bash
 , dnsmasq
 , gawk
 , getent
@@ -17,14 +18,14 @@
 
 python3Packages.buildPythonApplication rec {
 pname = "waydroid";
- version = "1.3.3";
+ version = "1.3.4";
 format = "other";
 
 src = fetchFromGitHub {
 owner = pname;
 repo = pname;
 rev = version;
- sha256 = "sha256-av1kcOSViUV2jsFiTE21N6sAJIL6K+zKkpPHjx6iYVk=";
+ sha256 = "sha256-0GBob9BUwiE5cFGdK8AdwsTjTOdc+AIWqUGN/gFfOqI=";
 };
 
 propagatedBuildInputs = with python3Packages; [
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
 pname = "pg_ivm";
- version = "1.3";
+ version = "1.4";
 
 src = fetchFromGitHub {
 owner = "sraoss";
 repo = pname;
 rev = "v${version}";
- hash = "sha256-HdIqAB/A6+EvioKhS2OKmlABjpeTAgkbU5ihbt/OzdI=";
+ hash = "sha256-pz9eHmd7GC30r0uUObOlrcdkAX4c+szjYAXS1U999CE=";
 };
 
 buildInputs = [ postgresql ];
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
 pname = "pgroonga";
- version = "2.4.0";
+ version = "2.4.2";
 
 src = fetchurl {
 url = "https://packages.groonga.org/source/${pname}/${pname}-${version}.tar.gz";
- sha256 = "sha256-W6quDn2B+BZ+J46aNMbtVq7OizT1q5jyKMZECAk0F7M=";
+ sha256 = "sha256-5klltU+9dz30tjE0lQfNinrVEZyT8UpK120kQ1j/yig=";
 };
 
 nativeBuildInputs = [ pkg-config ];
@@ -2,7 +2,7 @@
 
 stdenv.mkDerivation rec {
 pname = "pgrouting";
- version = "3.3.2";
+ version = "3.4.2";
 
 nativeBuildInputs = [ cmake perl ];
 buildInputs = [ postgresql boost ];
@@ -11,7 +11,7 @@ stdenv.mkDerivation rec {
 owner = "pgRouting";
 repo = pname;
 rev = "v${version}";
- sha256 = "sha256-H7h+eiH02qLscpiZ8yV5ofL7upeqRBXNQDGYS86f3og=";
+ sha256 = "sha256-By3XX4ow5+OdvpLlpozZe3674VSehO9T96pQtJy5y6g=";
 };
 
 installPhase = ''
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
 pname = "plpgsql_check";
- version = "2.2.4";
+ version = "2.2.6";
 
 src = fetchFromGitHub {
 owner = "okbob";
 repo = pname;
 rev = "v${version}";
- sha256 = "sha256-YUJLh1IgOOnNxPrH8NaY3jGEV+4mTjRffooIANkbbFo=";
+ hash = "sha256-8HFyIzJ1iF3K2vTlibFallvkMKjFTJ2DO64fORToD8E=";
 };
 
 buildInputs = [ postgresql ];
@@ -8,13 +8,13 @@
 
 stdenv.mkDerivation rec {
 pname = "vgmtools";
- version = "unstable-2022-12-03";
+ version = "unstable-2022-12-30";
 
 src = fetchFromGitHub {
 owner = "vgmrips";
 repo = "vgmtools";
- rev = "b9216623ffb9219c46a7a10669175c7a4c8cd946";
- sha256 = "fPt/z4D4C8TWoz7FivxmXGDcYGc7sXWvxE0+CoyFgDQ=";
+ rev = "6c2c21dfc871f8cb9c33a77fe7db01419b6ad97d";
+ sha256 = "qe8cHGf8X7JjjoiRQ/S3q/WhyvgrMEwsCo7QoQkmg5w=";
 };
 
 nativeBuildInputs = [
@@ -1,5 +1,7 @@
- { lib, stdenv, fetchurl, pkg-config, libcap, readline, texinfo, nss, nspr
- , libseccomp, pps-tools, gnutls }:
+ { lib, stdenv, fetchurl, pkg-config
+ , gnutls, libedit, nspr, nss, readline, texinfo
+ , libcap, libseccomp, pps-tools
+ }:
 
 stdenv.mkDerivation rec {
 pname = "chrony";
@@ -7,22 +9,33 @@ stdenv.mkDerivation rec {
 
 src = fetchurl {
 url = "https://download.tuxfamily.org/chrony/${pname}-${version}.tar.gz";
- sha256 = "sha256-nQ2oiahl8ImlohYQ/7ZxPjyUOM4wOmO0nC+26v9biAQ=";
+ hash = "sha256-nQ2oiahl8ImlohYQ/7ZxPjyUOM4wOmO0nC+26v9biAQ=";
 };
 
+ outputs = [ "out" "man" ];
+
+ nativeBuildInputs = [ pkg-config ];
+
+ buildInputs = [ gnutls libedit nspr nss readline texinfo ]
+ ++ lib.optionals stdenv.isLinux [ libcap libseccomp pps-tools ];
+
+ configureFlags = [
+ "--enable-ntp-signd"
+ "--sbindir=$(out)/bin"
+ "--chronyrundir=/run/chrony"
+ ] ++ lib.optional stdenv.isLinux "--enable-scfilter";
+
+ patches = [
+ # Cleanup the installation script
+ ./makefile.patch
+ ];
+
 postPatch = ''
 patchShebangs test
 '';
 
- buildInputs = [ readline texinfo nss nspr gnutls ]
- ++ lib.optionals stdenv.isLinux [ libcap libseccomp pps-tools ];
- nativeBuildInputs = [ pkg-config ];
-
 hardeningEnable = [ "pie" ];
 
- configureFlags = [ "--chronyvardir=$(out)/var/lib/chrony" "--enable-ntp-signd" ]
- ++ lib.optional stdenv.isLinux "--enable-scfilter";
-
 meta = with lib; {
 description = "Sets your computer's clock from time servers on the Net";
 homepage = "https://chrony.tuxfamily.org/";
pkgs/tools/networking/chrony/makefile.patch (new file, 23 lines)
@@ -0,0 +1,23 @@
diff --git a/Makefile.in b/Makefile.in
index ef100a4..47f54f4 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -23,7 +23,7 @@
 
 SYSCONFDIR = @SYSCONFDIR@
 BINDIR = @BINDIR@
-SBINDIR = @SBINDIR@
+SBINDIR = @BINDIR@
 LOCALSTATEDIR = @LOCALSTATEDIR@
 CHRONYVARDIR = @CHRONYVARDIR@
 DESTDIR =
@@ -86,9 +86,7 @@ getdate :
 
 install: chronyd chronyc
 	[ -d $(DESTDIR)$(SYSCONFDIR) ] || mkdir -p $(DESTDIR)$(SYSCONFDIR)
-	[ -d $(DESTDIR)$(SBINDIR) ] || mkdir -p $(DESTDIR)$(SBINDIR)
 	[ -d $(DESTDIR)$(BINDIR) ] || mkdir -p $(DESTDIR)$(BINDIR)
-	[ -d $(DESTDIR)$(CHRONYVARDIR) ] || mkdir -p $(DESTDIR)$(CHRONYVARDIR)
 	if [ -f $(DESTDIR)$(SBINDIR)/chronyd ]; then rm -f $(DESTDIR)$(SBINDIR)/chronyd ; fi
 	if [ -f $(DESTDIR)$(BINDIR)/chronyc ]; then rm -f $(DESTDIR)$(BINDIR)/chronyc ; fi
 	cp chronyd $(DESTDIR)$(SBINDIR)/chronyd
@@ -1,24 +1,22 @@
- { lib, stdenv, fetchFromGitHub, rustPlatform, pkg-config, openssl, CoreServices, libiconv }:
+ { lib, stdenv, fetchFromGitHub, rustPlatform, pkg-config, openssl, Security, CoreServices }:
 
 rustPlatform.buildRustPackage rec {
 pname = "shadowsocks-rust";
- version = "1.14.3";
+ version = "1.15.2";
 
 src = fetchFromGitHub {
 rev = "v${version}";
 owner = "shadowsocks";
 repo = pname;
- sha256 = "sha256-tRiziyCw1Qpm22RtZHeKt4VFReJidFHsPxPSjxIA3hA=";
+ hash = "sha256-CvAOvtC5U2njQuUjFxjnGeqhuxrCw4XI6goo1TxIhIU=";
 };
 
- cargoSha256 = "sha256-snnzNb1yJ8L5pMvNNEIf5hZOpFV6DKOWGtGP1T3YTWg=";
+ cargoHash = "sha256-ctZlYo82M7GKVvrEkw/7+aH9R0MeEsyv3IKl9k4SbiA=";
 
- RUSTC_BOOTSTRAP = 1;
+ nativeBuildInputs = lib.optionals stdenv.isLinux [ pkg-config ];
 
- nativeBuildInputs = [ pkg-config ];
- buildInputs = [ openssl ]
- ++ lib.optionals stdenv.isDarwin [ CoreServices libiconv ];
+ buildInputs = lib.optionals stdenv.isLinux [ openssl ]
+ ++ lib.optionals stdenv.isDarwin [ Security CoreServices ];
 
 cargoBuildFlags = [
 "--features=aead-cipher-extra,local-dns,local-http-native-tls,local-redir,local-tun"
@@ -36,8 +34,9 @@ rustPlatform.buildRustPackage rec {
 ];
 
 meta = with lib; {
+ description = "A Rust port of Shadowsocks";
 homepage = "https://github.com/shadowsocks/shadowsocks-rust";
- description = "A Rust port of shadowsocks";
+ changelog = "https://github.com/shadowsocks/shadowsocks-rust/raw/v${version}/debian/changelog";
 license = licenses.mit;
 maintainers = [ maintainers.marsam ];
 };
@@ -1,4 +1,4 @@
- { lib, stdenv, fetchFromGitHub, rustPlatform, darwin }:
+ { lib, stdenv, fetchFromGitHub, fetchpatch, rustPlatform, darwin }:
 
 rustPlatform.buildRustPackage rec {
 pname = "nixdoc";
@@ -11,6 +11,14 @@ rustPlatform.buildRustPackage rec {
 sha256 = "14d4dq06jdqazxvv7fq5872zy0capxyb0fdkp8qg06gxl1iw201s";
 };
 
+ patches = [
+ # Support nested identifiers https://github.com/nix-community/nixdoc/pull/27
+ (fetchpatch {
+ url = "https://github.com/nix-community/nixdoc/pull/27/commits/ea542735bf675fe2ccd37edaffb9138d1a8c1b7e.patch";
+ sha256 = "1fmz44jv2r9qsnjxvkkjfb0safy69l4x4vx1g5gisrp8nwdn94rj";
+ })
+ ];
+
 buildInputs = lib.optionals stdenv.isDarwin [ darwin.Security ];
 
 cargoSha256 = "1nv6g8rmjjbwqmjkrpqncypqvx5c7xp2zlx5h6rw2j9d1wlys0v5";
@@ -57,7 +57,8 @@ stdenv.mkDerivation rec {
 "--with-ksba-prefix=${libksba.dev}"
 "--with-npth-prefix=${npth}"
 ] ++ lib.optional guiSupport "--with-pinentry-pgm=${pinentry}/${pinentryBinaryPath}"
- ++ lib.optional withTpm2Tss "--with-tss=intel";
+ ++ lib.optional withTpm2Tss "--with-tss=intel"
+ ++ lib.optional stdenv.isDarwin "--disable-ccid-driver";
 postInstall = if enableMinimal
 then ''
 rm -r $out/{libexec,sbin,share}
@@ -633,6 +633,7 @@ mapAliases ({
 
 ### I ###
 
+ i3-gaps = i3; # Added 2023-01-03
 i3cat = throw "i3cat has been dropped due to the lack of maintanence from upstream since 2016"; # Added 2022-06-02
 iana_etc = throw "'iana_etc' has been renamed to/replaced by 'iana-etc'"; # Converted to throw 2022-02-22
 iasl = throw "iasl has been removed, use acpica-tools instead"; # Added 2021-08-08
@@ -1543,6 +1543,8 @@ with pkgs;
 
 ### APPLICATIONS/VERSION-MANAGEMENT
 
+ deepgit = callPackage ../applications/version-management/deepgit {};
+
 git = callPackage ../applications/version-management/git {
 inherit (darwin.apple_sdk.frameworks) CoreServices Security;
 perlLibs = [perlPackages.LWP perlPackages.URI perlPackages.TermReadKey];
@@ -11594,7 +11596,7 @@ with pkgs;
 shabnam-fonts = callPackage ../data/fonts/shabnam-fonts { };
 
 shadowsocks-rust = callPackage ../tools/networking/shadowsocks-rust {
- inherit (darwin.apple_sdk.frameworks) CoreServices;
+ inherit (darwin.apple_sdk.frameworks) Security CoreServices;
 };
 
 shadowsocks-v2ray-plugin = callPackage ../tools/networking/shadowsocks-v2ray-plugin { };
@@ -14521,7 +14523,8 @@ with pkgs;
 gcc-arm-embedded-9 = callPackage ../development/compilers/gcc-arm-embedded/9 {};
 gcc-arm-embedded-10 = callPackage ../development/compilers/gcc-arm-embedded/10 {};
 gcc-arm-embedded-11 = callPackage ../development/compilers/gcc-arm-embedded/11 {};
- gcc-arm-embedded = gcc-arm-embedded-11;
+ gcc-arm-embedded-12 = callPackage ../development/compilers/gcc-arm-embedded/12 {};
+ gcc-arm-embedded = gcc-arm-embedded-12;
 
 # Has to match the default gcc so that there are no linking errors when
 # using C/C++ libraries in D packages
@@ -27108,7 +27111,9 @@ with pkgs;
 nordzy-icon-theme = callPackage ../data/icons/nordzy-icon-theme { };
 
 inherit (callPackages ../data/fonts/noto-fonts {})
+ mkNoto
 noto-fonts
+ noto-fonts-lgc-plus
 noto-fonts-cjk-sans
 noto-fonts-cjk-serif
 noto-fonts-emoji
@@ -29846,8 +29851,6 @@ with pkgs;
 
 i3-auto-layout = callPackage ../applications/window-managers/i3/auto-layout.nix { };
 
- i3-gaps = callPackage ../applications/window-managers/i3/gaps.nix { };
-
 i3-rounded = callPackage ../applications/window-managers/i3/rounded.nix { };
 
 i3altlayout = callPackage ../applications/window-managers/i3/altlayout.nix { };