Merge branch 'staging-next' into staging

commit 8d502fd425

doc/.gitignore (vendored)
@@ -1,7 +1,8 @@
*.chapter.xml
*.section.xml
.version
functions/library/generated
functions/library/locations.xml
highlightjs
manual-full.xml
out
@@ -197,20 +197,14 @@ args.stdenv.mkDerivation (args // {
<title>Package naming</title>

<para>
The key words <emphasis>must</emphasis>, <emphasis>must not</emphasis>,
<emphasis>required</emphasis>, <emphasis>shall</emphasis>, <emphasis>shall
not</emphasis>, <emphasis>should</emphasis>, <emphasis>should
not</emphasis>, <emphasis>recommended</emphasis>, <emphasis>may</emphasis>,
and <emphasis>optional</emphasis> in this section are to be interpreted as
described in <link xlink:href="https://tools.ietf.org/html/rfc2119">RFC
2119</link>. Only <emphasis>emphasized</emphasis> words are to be
interpreted in this way.
</para>

<para>
@@ -253,15 +247,15 @@ args.stdenv.mkDerivation (args // {
<itemizedlist>
<listitem>
<para>
The <literal>name</literal> attribute <emphasis>should</emphasis> be
identical to the upstream package name.
</para>
</listitem>
<listitem>
<para>
The <literal>name</literal> attribute <emphasis>must not</emphasis>
contain uppercase letters — e.g., <literal>"mplayer-1.0rc2"</literal>
instead of <literal>"MPlayer-1.0rc2"</literal>.
</para>
</listitem>
<listitem>
@@ -275,28 +269,29 @@ args.stdenv.mkDerivation (args // {
<para>
If a package is not a release but a commit from a repository, then the
version part of the name <emphasis>must</emphasis> be the date of that
(fetched) commit. The date <emphasis>must</emphasis> be in
<literal>"YYYY-MM-DD"</literal> format. Also append
<literal>"unstable"</literal> to the name - e.g.,
<literal>"pkgname-unstable-2014-09-23"</literal>.
</para>
</listitem>
<listitem>
<para>
Dashes in the package name <emphasis>should</emphasis> be preserved in
new variable names, rather than converted to underscores or camel cased
— e.g., <varname>http-parser</varname> instead of
<varname>http_parser</varname> or <varname>httpParser</varname>. The
hyphenated style is preferred in all three package names.
</para>
</listitem>
<listitem>
<para>
If there are multiple versions of a package, this
<emphasis>should</emphasis> be reflected in the variable names in
<filename>all-packages.nix</filename>, e.g. <varname>json-c-0-9</varname>
and <varname>json-c-0-11</varname>. If there is an obvious “default”
version, make an attribute like <literal>json-c = json-c-0-9;</literal>.
See also <xref linkend="sec-versioning" />
</para>
</listitem>
</itemizedlist>
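<para>
A minimal sketch of how these conventions combine (the attribute paths below
are illustrative only, not excerpts from
<filename>all-packages.nix</filename>; <varname>callPackage</varname> is
assumed to be in scope, as it is there):
<programlisting>
rec {
  # lowercase, with the upstream dash preserved in the attribute name
  http-parser = callPackage ../development/libraries/http-parser { };

  # a snapshot of an unreleased commit; the package file itself sets
  # name = "pkgname-unstable-2014-09-23"
  pkgname-unstable = callPackage ../tools/misc/pkgname/unstable.nix { };

  # multiple versions, with an obvious default
  json-c-0-9 = callPackage ../development/libraries/json-c/0.9.nix { };
  json-c-0-11 = callPackage ../development/libraries/json-c/0.11.nix { };
  json-c = json-c-0-9;
}
</programlisting>
</para>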
@@ -814,8 +809,8 @@ args.stdenv.mkDerivation (args // {

<para>
There are multiple ways to fetch a package source in nixpkgs. The general
guideline is that you should package reproducible sources with a high degree
of availability. Right now there is only one fetcher which has mirroring
support and that is <literal>fetchurl</literal>. Note that you should also
prefer protocols which have a corresponding proxy environment variable.
</para>
@@ -869,8 +864,10 @@ src = fetchFromGitHub {
}
</programlisting>
Find the value to put as <literal>sha256</literal> by running
<literal>nix run -f '<nixpkgs>' nix-prefetch-github -c
nix-prefetch-github --rev 1f795f9f44607cc5bec70d1300150bfefcef2aae NixOS
nix</literal> or <literal>nix-prefetch-url --unpack
https://github.com/NixOS/nix/archive/1f795f9f44607cc5bec70d1300150bfefcef2aae.tar.gz</literal>.
</para>
</listitem>
</itemizedlist>
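<para>
A sketch of how the prefetched value is then used (the
<literal>sha256</literal> below is a placeholder, not a real hash; replace it
with whatever the prefetch command printed):
<programlisting>
src = fetchFromGitHub {
  owner = "NixOS";
  repo = "nix";
  rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
  # placeholder: paste the base-32 hash printed by nix-prefetch-url or
  # nix-prefetch-github here
  sha256 = "0000000000000000000000000000000000000000000000000000";
};
</programlisting>
</para>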
@@ -953,17 +950,23 @@ $ nix-hash --type sha256 --to-base32 <replaceable>HASH</replaceable>
would be to replace the hash with a fake one and rebuild. The Nix build will
fail and the error message will contain the desired hash.
</para>
<warning>
<para>
This method has security problems. Check below for details.
</para>
</warning>
</listitem>
</orderedlist>

<section xml:id="sec-source-hashes-security">
<title>Obtaining hashes securely</title>

<para>
Let's say a Man-in-the-Middle (MITM) attacker sits close to your network.
Then instead of fetching the source you can fetch malware, and instead of
the source hash you get the hash of the malware. Here are security
considerations for this scenario:
</para>

<itemizedlist>
<listitem>
<para>
@@ -972,7 +975,8 @@ $ nix-hash --type sha256 --to-base32 <replaceable>HASH</replaceable>
</listitem>
<listitem>
<para>
hashes from upstream (in method 3) should be obtained via secure
protocol;
</para>
</listitem>
<listitem>
@@ -982,12 +986,12 @@ $ nix-hash --type sha256 --to-base32 <replaceable>HASH</replaceable>
</listitem>
<listitem>
<para>
<literal>https://</literal> URLs are not secure in method 5. When
obtaining hashes with the fake-hash method, TLS checks are disabled. So
re-fetch the source hash from several different networks to exclude a MITM
scenario. Alternatively, use the fake-hash method to make Nix error out,
but instead of extracting the hash from the error, extract the
<literal>https://</literal> URL and prefetch it with method 1.
</para>
</listitem>
</itemizedlist>
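<para>
As an illustrative sketch of the fake-hash method discussed above (it assumes
<varname>lib.fakeSha256</varname>, the conventional all-zero placeholder; the
URL is hypothetical, and the warning about this method still applies):
<programlisting>
src = fetchurl {
  url = "https://example.org/releases/pkgname-1.0.tar.gz";
  # build once with a known-wrong hash; the resulting error message
  # contains the hash of what was actually fetched
  sha256 = lib.fakeSha256;
};
</programlisting>
</para>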
@@ -132,13 +132,13 @@
</itemizedlist>

<para>
The difference between a package being unsupported on some system and being
broken is admittedly a bit fuzzy. If a program <emphasis>ought</emphasis> to
work on a certain platform, but doesn't, the platform should be included in
<literal>meta.platforms</literal>, but marked as broken with e.g.
<literal>meta.broken = !hostPlatform.isWindows</literal>. Of course, this
begs the question of what "ought" means exactly. That is left to the package
maintainer.
</para>
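<para>
A minimal sketch of how that looks in a (hypothetical) package definition,
assuming <varname>lib</varname> and <varname>stdenv</varname> are in scope:
<programlisting>
stdenv.mkDerivation {
  name = "some-tool-1.0";
  # ...
  meta = {
    # the package ought to work on all Unix platforms ...
    platforms = lib.platforms.unix;
    # ... but is currently known not to build on Darwin
    broken = stdenv.hostPlatform.isDarwin;
  };
}
</programlisting>
</para>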
</section>
<section xml:id="sec-allow-unfree">
@@ -175,9 +175,8 @@
</programlisting>
</para>
<para>
For a more useful example, try the following. This configuration only
allows unfree packages named flash player and visual studio code:
<programlisting>
{
allowUnfreePredicate = (pkg: builtins.elem
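<para>
For reference, a complete predicate along the lines the text describes might
look like the following sketch (the package names to match are whatever your
configuration needs):
<programlisting>
{
  allowUnfreePredicate = (pkg: builtins.elem
    (builtins.parseDrvName pkg.name).name [
      "flashplayer"
      "vscode"
    ]);
}
</programlisting>
</para>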
@@ -6,17 +6,17 @@
<title>Introduction</title>

<para>
"Cross-compilation" means compiling a program on one machine for another
type of machine. For example, a typical use of cross-compilation is to
compile programs for embedded devices. These devices often don't have the
computing power and memory to compile their own programs. One might think
that cross-compilation is a fairly niche concern. However, there are
significant advantages to rigorously distinguishing between build-time and
run-time environments! This applies even when one is developing and
deploying on the same machine. Nixpkgs is increasingly adopting the opinion
that packages should be written with cross-compilation in mind, and nixpkgs
should evaluate in a similar way (by minimizing cross-compilation-specific
special cases) whether or not one is cross-compiling.
</para>

<para>
@@ -34,15 +34,16 @@
<title>Platform parameters</title>

<para>
Nixpkgs follows the
<link
xlink:href="https://gcc.gnu.org/onlinedocs/gccint/Configure-Terms.html">conventions
of GNU autoconf</link>. We distinguish between 3 types of platforms when
building a derivation: <wordasword>build</wordasword>,
<wordasword>host</wordasword>, and <wordasword>target</wordasword>. In
summary, <wordasword>build</wordasword> is the platform on which a package
is being built, <wordasword>host</wordasword> is the platform on which it
will run. The third attribute, <wordasword>target</wordasword>, is relevant
only for certain specific compilers and build tools.
</para>

<para>
@@ -95,10 +96,10 @@
The build process of certain compilers is written in such a way that the
compiler resulting from a single build can itself only produce binaries
for a single platform. The task of specifying this single "target
platform" is thus pushed to build time of the compiler. The root cause
of this is that the compiler (which will be run on the host) and the
standard library/runtime (which will be run on the target) are built by
a single build process.
</para>
<para>
There is no fundamental need to think about a single target ahead of
@@ -136,9 +137,9 @@
This is a two-component shorthand for the platform. Examples of this
would be "x86_64-darwin" and "i686-linux"; see
<literal>lib.systems.doubles</literal> for more. The first component
corresponds to the CPU architecture of the platform and the second to
the operating system of the platform (<literal>[cpu]-[os]</literal>).
This format has built-in support in Nix, such as the
<varname>builtins.currentSystem</varname> impure string.
</para>
</listitem>
@@ -149,14 +150,14 @@
</term>
<listitem>
<para>
This is a 3- or 4- component shorthand for the platform. Examples of
this would be <literal>x86_64-unknown-linux-gnu</literal> and
<literal>aarch64-apple-darwin14</literal>. This is a standard format
called the "LLVM target triple", as they are pioneered by LLVM. In the
4-part form, this corresponds to
<literal>[cpu]-[vendor]-[os]-[abi]</literal>. This format is strictly
more informative than the "Nix host double", as the previous format
could analogously be termed. This needs a better name than
<varname>config</varname>!
</para>
</listitem>
@@ -167,11 +168,10 @@
</term>
<listitem>
<para>
This is a Nix representation of a parsed LLVM target triple with
white-listed components. This can be specified directly, or actually
parsed from the <varname>config</varname>. See
<literal>lib.systems.parse</literal> for the exact representation.
</para>
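<para>
As a sketch, the parsed form can be inspected from
<command>nix repl</command> (this assumes
<literal>lib.systems.parse.mkSystemFromString</literal>, which builds the
parsed representation from a config string):
<screen>
nix-repl> lib = import <nixpkgs/lib>

nix-repl> (lib.systems.parse.mkSystemFromString "aarch64-unknown-linux-gnu").cpu.name
"aarch64"
</screen>
</para>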
</listitem>
</varlistentry>
@@ -253,15 +253,15 @@
<para>
Some examples will make this clearer. If a package is being built with a
<literal>(build, host, target)</literal> platform triple of <literal>(foo,
bar, bar)</literal>, then its build-time dependencies would have a triple
of <literal>(foo, foo, bar)</literal>, and <emphasis>those
packages'</emphasis> build-time dependencies would have a triple of
<literal>(foo, foo, foo)</literal>. In other words, it should take two
"rounds" of following build-time dependency edges before one reaches a
fixed point where, by the sliding window principle, the platform triple no
longer changes. Indeed, this happens with cross-compilation, where only
rounds of native dependencies starting with the second necessarily coincide
with native packages.
</para>

<note>
@@ -273,23 +273,24 @@
</note>

<para>
How does this work in practice? Nixpkgs is now structured so that
build-time dependencies are taken from <varname>buildPackages</varname>,
whereas run-time dependencies are taken from the top level attribute set.
For example, <varname>buildPackages.gcc</varname> should be used at
build-time, while <varname>gcc</varname> should be used at run-time. Now,
for most of Nixpkgs's history, there was no
<varname>buildPackages</varname>, and most packages have not been
refactored to use it explicitly. Instead, one can use the six
(<emphasis>gasp</emphasis>) attributes used for specifying dependencies as
documented in <xref linkend="ssec-stdenv-dependencies"/>. We "splice"
together the run-time and build-time package sets with
<varname>callPackage</varname>, and then <varname>mkDerivation</varname>
for each of four attributes pulls the right derivation out. This splicing
can be skipped when not cross-compiling as the package sets are the same,
but is a bit slow for cross-compiling. Because of this, a
best-of-both-worlds solution is in the works with no splicing or explicit
access of <varname>buildPackages</varname> needed. For now, feel free to
use either method.
</para>
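<para>
A small sketch of the distinction for a hypothetical package (the URL and
hash are placeholders): <varname>cmake</varname> is only needed while
building, while <varname>zlib</varname> ends up linked into the result:
<programlisting>
{ stdenv, fetchurl, cmake, zlib }:

stdenv.mkDerivation {
  name = "example-1.0";
  src = fetchurl {
    url = "https://example.org/example-1.0.tar.gz";
    sha256 = "0000000000000000000000000000000000000000000000000000";
  };

  # build-time only: resolved against the build platform
  # (conceptually, buildPackages.cmake)
  nativeBuildInputs = [ cmake ];

  # needed at run time: resolved against the host platform
  buildInputs = [ zlib ];
}
</programlisting>
</para>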

<note>
@@ -311,8 +312,8 @@
should be answered here. Ideally, the information above is exhaustive, so
this section cannot provide any new information, but it is ludicrous and
cruel to expect everyone to spend effort working through the interaction of
many features just to figure out the same answer to the same common
problem. Feel free to add to this list!
</para>

<qandaset>
@@ -434,14 +435,15 @@ nix-build <nixpkgs> --arg crossSystem '{ config = "<arch>-<os>
build plan or package set. A simple "build vs deploy" dichotomy is adequate:
the sliding window principle described in the previous section shows how to
interpolate between these two "end points" to get the 3 platform triple
for each bootstrapping stage. That means for any package in a given package
set, even those not bound on the top level but only reachable via
dependencies or <varname>buildPackages</varname>, the three platforms will
be defined as one of <varname>localSystem</varname> or
<varname>crossSystem</varname>, with the former replacing the latter as one
traverses build-time dependencies. A last simple difference is that
<varname>crossSystem</varname> should be null when one doesn't want to
cross-compile, while the <varname>*Platform</varname>s are always non-null.
<varname>localSystem</varname> is always non-null.
</para>
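<para>
A concrete invocation of the kind referenced above (a sketch; any package
attribute can stand in for <literal>hello</literal>):
<screen>
<prompt>$ </prompt>nix-build <nixpkgs> --arg crossSystem '{ config = "aarch64-unknown-linux-gnu"; }' -A hello
</screen>
</para>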
</section>
<!--============================================================-->
@@ -455,13 +457,13 @@ nix-build <nixpkgs> --arg crossSystem '{ config = "<arch>-<os>
<note>
<para>
If one explores Nixpkgs, they will see derivations with names like
<literal>gccCross</literal>. Such <literal>*Cross</literal> derivations are
a holdover from before we properly distinguished between the host and
target platforms—the derivation with "Cross" in the name covered the
<literal>build = host != target</literal> case, while the other covered the
<literal>host = target</literal>, with build platform the same or not based
on whether one was using its <literal>.nativeDrv</literal> or
<literal>.crossDrv</literal>. This ugliness will disappear soon.
</para>
</note>
</section>
@@ -5,11 +5,11 @@
<title>pkgs.appimageTools</title>

<para>
<varname>pkgs.appimageTools</varname> is a set of functions for extracting
and wrapping <link xlink:href="https://appimage.org/">AppImage</link> files.
They are meant to be used if traditional packaging from source is infeasible,
or it would take too long. To quickly run an AppImage file,
<literal>pkgs.appimage-run</literal> can be used as well.
</para>

<warning>
@@ -19,13 +19,13 @@
</para>
</warning>

<section xml:id="ssec-pkgs-appimageTools-formats">
<title>AppImage formats</title>

<para>
There are different formats for AppImages, see
<link xlink:href="https://github.com/AppImage/AppImageSpec/blob/74ad9ca2f94bf864a4a0dac1f369dd4f00bd1c28/draft.md#image-format">the
specification</link> for details.
</para>

<itemizedlist>
@@ -34,7 +34,6 @@
Type 1 images are ISO 9660 files that are also ELF executables.
</para>
</listitem>
<listitem>
<para>
Type 2 images are ELF executables with an appended filesystem.
@@ -46,7 +45,7 @@
They can be told apart with <command>file -k</command>:
</para>

<screen>
<prompt>$ </prompt>file -k type1.AppImage
type1.AppImage: ELF 64-bit LSB executable, x86-64, version 1 (SYSV) ISO 9660 CD-ROM filesystem data 'AppImage' (Lepton 3.x), scale 0-0,
spot sensor temperature 0.000000, unit celsius, color scheme 0, calibration: offset 0.000000, slope 0.000000, dynamically linked, interpreter /lib64/ld-linux-x86-64.so.2, for GNU/Linux 2.6.18, BuildID[sha1]=d629f6099d2344ad82818172add1d38c5e11bc6d, stripped\012- data
@@ -56,7 +55,8 @@ type2.AppImage: ELF 64-bit LSB executable, x86-64, version 1 (SYSV) (Lepton 3.x)
</screen>

<para>
Note how the type 1 AppImage is described as an <literal>ISO 9660 CD-ROM
filesystem</literal>, and the type 2 AppImage is not.
</para>
</section>
@@ -64,12 +64,11 @@ type2.AppImage: ELF 64-bit LSB executable, x86-64, version 1 (SYSV) (Lepton 3.x)
<title>Wrapping</title>

<para>
Depending on the type of AppImage you're wrapping, you'll have to use
<varname>wrapType1</varname> or <varname>wrapType2</varname>.
</para>

<programlisting>
appimageTools.wrapType2 { # or wrapType1
name = "patchwork"; <co xml:id='ex-appimageTools-wrapping-1' />
src = fetchurl { <co xml:id='ex-appimageTools-wrapping-2' />
@@ -79,7 +78,6 @@ appimageTools.wrapType2 { # or wrapType1
extraPkgs = pkgs: with pkgs; [ ]; <co xml:id='ex-appimageTools-wrapping-3' />
}</programlisting>

<calloutlist>
<callout arearefs='ex-appimageTools-wrapping-1'>
<para>
@@ -93,29 +91,28 @@ appimageTools.wrapType2 { # or wrapType1
</callout>
<callout arearefs='ex-appimageTools-wrapping-2'>
<para>
<varname>extraPkgs</varname> allows you to pass a function to include
additional packages inside the FHS environment your AppImage is going to
run in. There are a few ways to learn which dependencies an application
needs:
<itemizedlist>
<listitem>
<para>
Looking through the extracted AppImage files, reading its scripts and
running <command>patchelf</command> and <command>ldd</command> on its
executables. This can also be done in <command>appimage-run</command>,
by setting <command>APPIMAGE_DEBUG_EXEC=bash</command>.
</para>
</listitem>
<listitem>
<para>
Running <command>strace -vfefile</command> on the wrapped executable,
looking for libraries that can't be found.
</para>
</listitem>
</itemizedlist>
</para>
</callout>
</calloutlist>

</section>
</section>
@@ -24,9 +24,9 @@

<para>
This function is analogous to the <command>docker build</command> command,
in that it can be used to build a Docker-compatible repository tarball
containing a single image with one or multiple layers. As such, the result
is suitable for being loaded in Docker with <command>docker load</command>.
</para>
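<para>
A minimal sketch of such a call (the package and image names are arbitrary);
the resulting tarball can then be loaded with <command>docker load</command>:
<programlisting>
pkgs.dockerTools.buildImage {
  name = "hello-docker";
  tag = "latest";
  contents = [ pkgs.hello ];
  config = {
    Cmd = [ "/bin/hello" ];
  };
}
</programlisting>
</para>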

<para>
@@ -190,8 +190,8 @@ buildImage {
By default <function>buildImage</function> will use a static date of one
second past the UNIX Epoch. This allows <function>buildImage</function> to
produce binary reproducible images. When listing images with
<command>docker images</command>, the newly created images will be listed
like this:
</para>
<screen><![CDATA[
$ docker images
@@ -402,9 +402,9 @@ pkgs.dockerTools.buildLayeredImage {

<para>
This function is analogous to the <command>docker pull</command> command, in
that it can be used to pull a Docker image from a Docker registry. By
default <link xlink:href="https://hub.docker.com/">Docker Hub</link> is used
to pull images.
</para>

<para>
@@ -484,10 +484,10 @@ sha256:20d9485b25ecfd89204e843a962c1bd70e9cc6858d65d7f5fadc340246e2116b

<para>
This function is analogous to the <command>docker export</command> command,
in that it can be used to flatten a Docker image that contains multiple
layers. It is in fact the result of the merge of all the layers of the
image. As such, the result is suitable for being imported in Docker with
<command>docker import</command>.
</para>

<note>
@@ -5,24 +5,21 @@
<title>Fetcher functions</title>

<para>
When using Nix, you will frequently need to download source code and other
files from the internet. Nixpkgs comes with a few helper functions that allow
you to fetch fixed-output derivations in a structured way.
</para>

<para>
The two fetcher primitives are <function>fetchurl</function> and
<function>fetchzip</function>. Both of these have two required arguments, a
URL and a hash. The hash is typically <literal>sha256</literal>, although
many more hash algorithms are supported. Nixpkgs contributors are currently
recommended to use <literal>sha256</literal>. This hash will be used by Nix
to identify your source. A typical usage of fetchurl is provided below.
</para>

<programlisting><![CDATA[
{ stdenv, fetchurl }:

stdenv.mkDerivation {
@@ -35,172 +32,163 @@ stdenv.mkDerivation {
]]></programlisting>

<para>
The main difference between <function>fetchurl</function> and
<function>fetchzip</function> is in how they store the contents.
<function>fetchurl</function> will store the unaltered contents of the URL
within the Nix store. <function>fetchzip</function> on the other hand will
decompress the archive for you, making files and directories directly
accessible in the future. <function>fetchzip</function> can only be used with
archives. Despite the name, <function>fetchzip</function> is not limited to
.zip files and can also be used with any tarball.
</para>
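<para>
For comparison, a <function>fetchzip</function> call takes the same two
required arguments (the URL and hash here are placeholders):
<programlisting>
src = fetchzip {
  url = "https://example.org/releases/pkgname-1.0.tar.gz";
  sha256 = "0000000000000000000000000000000000000000000000000000";
};
</programlisting>
</para>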

<para>
<function>fetchpatch</function> works very similarly to
<function>fetchurl</function> with the same arguments expected. It expects
patch files as a source and performs normalization on them before
computing the checksum. For example it will remove comments or other unstable
parts that are sometimes added by version control systems and can change over
time.
</para>

<para>
Other fetcher functions allow you to add source code directly from a VCS such
as subversion or git. These are mostly straightforward names based on the
name of the command used with the VCS system. Because they give you a working
repository, they act most like <function>fetchzip</function>.
</para>

<variablelist>
<varlistentry>
<term>
<literal>fetchsvn</literal>
</term>
<listitem>
<para>
Used with Subversion. Expects <literal>url</literal> to a Subversion
directory, <literal>rev</literal>, and <literal>sha256</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchgit</literal>
</term>
<listitem>
<para>
Used with Git. Expects <literal>url</literal> to a Git repo,
<literal>rev</literal>, and <literal>sha256</literal>.
<literal>rev</literal> in this case can be the full git commit id (SHA1
hash) or a tag name like <literal>refs/tags/v1.0</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchfossil</literal>
</term>
<listitem>
<para>
Used with Fossil. Expects <literal>url</literal> to a Fossil archive,
<literal>rev</literal>, and <literal>sha256</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchcvs</literal>
</term>
<listitem>
<para>
Used with CVS. Expects <literal>cvsRoot</literal>, <literal>tag</literal>,
and <literal>sha256</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchhg</literal>
</term>
<listitem>
<para>
Used with Mercurial. Expects <literal>url</literal>,
<literal>rev</literal>, and <literal>sha256</literal>.
</para>
</listitem>
</varlistentry>
</variablelist>
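<para>
As a sketch, a typical <literal>fetchgit</literal> call combines the three
attributes listed above (the URL and hash are placeholders):
<programlisting>
src = fetchgit {
  url = "https://example.org/project.git";
  rev = "refs/tags/v1.0";
  sha256 = "0000000000000000000000000000000000000000000000000000";
};
</programlisting>
</para>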

<para>
A number of fetcher functions wrap part of <function>fetchurl</function> and
<function>fetchzip</function>. They are mainly convenience functions intended
for commonly used destinations of source code in Nixpkgs. These wrapper
fetchers are listed below.
</para>

<variablelist>
<varlistentry>
<term>
<literal>fetchFromGitHub</literal>
</term>
<listitem>
<para>
<function>fetchFromGitHub</function> expects four arguments.
<literal>owner</literal> is a string corresponding to the GitHub user or
organization that controls this repository. <literal>repo</literal>
corresponds to the name of the software repository. These are located at
the top of every GitHub HTML page as
<literal>owner</literal>/<literal>repo</literal>. <literal>rev</literal>
corresponds to the Git commit hash or tag (e.g. <literal>v1.0</literal>)
that will be downloaded from Git. Finally, <literal>sha256</literal>
corresponds to the hash of the extracted directory. Again, other hash
algorithms are also available but <literal>sha256</literal> is currently
preferred.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchFromGitLab</literal>
</term>
<listitem>
<para>
This is used with GitLab repositories. The arguments expected are very
similar to fetchFromGitHub above.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchFromBitbucket</literal>
</term>
<listitem>
<para>
This is used with BitBucket repositories. The arguments expected are very
similar to fetchFromGitHub above.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchFromSavannah</literal>
</term>
<listitem>
<para>
This is used with Savannah repositories. The arguments expected are very
similar to fetchFromGitHub above.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>fetchFromRepoOrCz</literal>
</term>
<listitem>
<para>
This is used with repo.or.cz repositories. The arguments expected are very
similar to fetchFromGitHub above.
</para>
</listitem>
</varlistentry>
</variablelist>

</section>
@@ -13,12 +13,17 @@

<xi:include href="./library/attrsets.xml" />

<!-- These docs are generated via nixdoc. To add another generated
     library function file to this list, the file
     `lib-function-docs.nix` must also be updated. -->

<xi:include href="./library/generated/strings.xml" />

<xi:include href="./library/generated/trivial.xml" />

<xi:include href="./library/generated/lists.xml" />

<xi:include href="./library/generated/debug.xml" />

<xi:include href="./library/generated/options.xml" />
</section>
@@ -14,15 +14,15 @@
<title>Usage</title>

<para>
<literal>pkgs.nix-gitignore</literal> exports a number of functions, but
you'll most likely need either <literal>gitignoreSource</literal> or
<literal>gitignoreSourcePure</literal>. As their first argument, they both
accept either 1. a file with gitignore lines or 2. a string with gitignore
lines, or 3. a list of either of the two. They will be concatenated into a
single big string.
</para>

<programlisting><![CDATA[
{ pkgs ? import <nixpkgs> {} }:

nix-gitignore.gitignoreSource [] ./source
@@ -40,24 +40,29 @@
]]></programlisting>

<para>
These functions are derived from the <literal>Filter</literal> functions by
setting the first filter argument to <literal>(_: _: true)</literal>:
</para>

<programlisting><![CDATA[
gitignoreSourcePure = gitignoreFilterSourcePure (_: _: true);
gitignoreSource = gitignoreFilterSource (_: _: true);
]]></programlisting>

<para>
Those filter functions accept the same arguments the
<literal>builtins.filterSource</literal> function would pass to its filters,
thus <literal>fn: gitignoreFilterSourcePure fn ""</literal> should be
extensionally equivalent to <literal>filterSource</literal>. The file is
blacklisted iff it's blacklisted by either your filter or the
gitignoreFilter.
</para>
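<para>
A sketch of passing your own filter in the position that
<literal>gitignoreSource</literal> fills with <literal>(_: _: true)</literal>
(the extra predicate here is hypothetical and simply drops files named
<filename>TODO.md</filename>):
<programlisting>
nix-gitignore.gitignoreFilterSource
  (path: type: baseNameOf path != "TODO.md")
  [ ./.gitignore ]
  ./source
</programlisting>
</para>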

<para>
If you want to make your own filter from scratch, you may use
</para>

<programlisting><![CDATA[
gitignoreFilter = ign: root: filterPattern (gitignoreToPatterns ign) root;
]]></programlisting>
</section>
@@ -66,10 +71,11 @@ gitignoreFilter = ign: root: filterPattern (gitignoreToPatterns ign) root;
<title>gitignore files in subdirectories</title>

<para>
If you wish to use a filter that would search for .gitignore files in
subdirectories, just like git does by default, use this function:
</para>

<programlisting><![CDATA[
gitignoreFilterRecursiveSource = filter: patterns: root:
# OR
gitignoreRecursiveSource = gitignoreFilterSourcePure (_: _: true);
@@ -7,17 +7,15 @@
<para>
<function>prefer-remote-fetch</function> is an overlay that downloads sources
on a remote builder. This is useful when the evaluating machine has a slow
upload while the builder can fetch faster directly from the source. To use
it, put the following snippet as a new overlay:
<programlisting>
self: super:
(super.prefer-remote-fetch self super)
</programlisting>

A full configuration example that sets up the overlay for your own
account could look like this:
<programlisting>
$ mkdir ~/.config/nixpkgs/overlays/
$ cat > ~/.config/nixpkgs/overlays/prefer-remote-fetch.nix <<EOF
self: super: super.prefer-remote-fetch self super
@@ -5,12 +5,11 @@
<title>Trivial builders</title>

<para>
Nixpkgs provides a couple of functions that help with building derivations.
The most important one, <function>stdenv.mkDerivation</function>, has already
been documented above. The following functions wrap
<function>stdenv.mkDerivation</function>, making it easier to use in certain
cases.
</para>

<variablelist>
@@ -19,26 +18,23 @@
<literal>runCommand</literal>
</term>
<listitem>
<para>
This takes three arguments, <literal>name</literal>,
<literal>env</literal>, and <literal>buildCommand</literal>.
<literal>name</literal> is just the name that Nix will append to the store
path in the same way that <literal>stdenv.mkDerivation</literal> uses its
<literal>name</literal> attribute. <literal>env</literal> is an attribute
set specifying environment variables that will be set for this derivation.
These attributes are then passed to the wrapped
<literal>stdenv.mkDerivation</literal>. <literal>buildCommand</literal>
specifies the commands that will be run to create this derivation. Note
that you will need to create <literal>$out</literal> for Nix to register
the command as successful.
</para>
<para>
An example of using <literal>runCommand</literal> is provided below.
</para>
<programlisting>
(import <nixpkgs> {}).runCommand "my-example" {} ''
echo My example command is running
@ -65,41 +61,35 @@
|
||||
<literal>runCommandCC</literal>
|
||||
</term>
|
||||
<listitem>
|
||||
<para>
|
||||
This works just like <literal>runCommand</literal>. The only
|
||||
difference is that it also provides a C compiler in
|
||||
<literal>buildCommand</literal>’s environment. To minimize your
|
||||
dependencies, you should only use this if you are sure you will
|
||||
need a C compiler as part of running your command.
|
||||
<para>
|
||||
This works just like <literal>runCommand</literal>. The only difference is
|
||||
that it also provides a C compiler in <literal>buildCommand</literal>’s
|
||||
environment. To minimize your dependencies, you should only use this if
|
||||
you are sure you will need a C compiler as part of running your command.
|
||||
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<literal>writeTextFile</literal>, <literal>writeText</literal>, <literal>writeTextDir</literal>, <literal>writeScript</literal>, <literal>writeScriptBin</literal>
</term>
<listitem>
<para>
These functions write <literal>text</literal> to the Nix store. This is
useful for creating scripts from Nix expressions.
<literal>writeTextFile</literal> takes an attribute set and expects two
arguments, <literal>name</literal> and <literal>text</literal>.
<literal>name</literal> corresponds to the name used in the Nix store
path. <literal>text</literal> will be the contents of the file. You can
also set <literal>executable</literal> to true to make this file have the
executable bit set.
</para>
<para>
Many more commands wrap <literal>writeTextFile</literal> including
<literal>writeText</literal>, <literal>writeTextDir</literal>,
<literal>writeScript</literal>, and <literal>writeScriptBin</literal>.
These are convenience functions over <literal>writeTextFile</literal>.
</para>
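<para>
A small sketch, assuming <literal>pkgs</literal> is an imported Nixpkgs and
the names are purely illustrative: the first call stores a plain file, the
second an executable script under <filename>/bin</filename> of its output:
</para>
<programlisting>
my-file = pkgs.writeTextFile {
  name = "my-example-file";
  text = "some contents";
};

my-script = pkgs.writeTextFile {
  name = "my-example-script";
  destination = "/bin/my-example-script";
  executable = true;
  text = ''
    #!${pkgs.stdenv.shell}
    echo "hello from my-example-script"
  '';
};
</programlisting>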
</listitem>
</varlistentry>
<varlistentry>
@ -109,16 +99,15 @@
<listitem>
<para>
This can be used to put many derivations into the same directory
structure. It works by creating a new derivation and adding symlinks to
each of the paths listed. It expects two arguments,
<literal>name</literal>, and <literal>paths</literal>.
<literal>name</literal> is the name used in the Nix store path for the
created derivation. <literal>paths</literal> is a list of paths that will
be symlinked. These paths can be to Nix store derivations or any other
subdirectory contained within.
</para>
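<para>
For example, a sketch that joins two packages into one tree (the package
choice is only illustrative, and <literal>pkgs</literal> is assumed to be an
imported Nixpkgs):
</para>
<programlisting>
pkgs.symlinkJoin {
  name = "my-tools";
  paths = [ pkgs.hello pkgs.gnugrep ];
}
</programlisting>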
</listitem>
</varlistentry>
</variablelist>

</section>

@ -4,39 +4,38 @@
<title>OCaml</title>

<para>
OCaml libraries should be installed in
<literal>$(out)/lib/ocaml/${ocaml.version}/site-lib/</literal>. Such
directories are automatically added to the <literal>$OCAMLPATH</literal>
environment variable when building another package that depends on them or
when opening a <literal>nix-shell</literal>.
</para>

<para>
Given that most of the OCaml ecosystem is now built with dune, nixpkgs
includes a convenience build support function called
<literal>buildDunePackage</literal> that will build an OCaml package using
dune, OCaml and findlib and any additional dependencies provided as
<literal>buildInputs</literal> or <literal>propagatedBuildInputs</literal>.
</para>

<para>
Here is a simple package example. It defines an (optional) attribute
<literal>minimumOCamlVersion</literal> that will be used to throw a
descriptive evaluation error if building with an older OCaml is attempted. It
uses the <literal>fetchFromGitHub</literal> fetcher to get its source. It
sets the <literal>doCheck</literal> (optional) attribute to
<literal>true</literal> which means that tests will be run with <literal>dune
runtest -p angstrom</literal> after the build (<literal>dune build -p
angstrom</literal>) is complete. It uses <literal>alcotest</literal> as a
build input (because it is needed to run the tests) and
<literal>bigstringaf</literal> and <literal>result</literal> as propagated
build inputs (thus they will also be available to libraries depending on this
library). The library will be installed using the
<literal>angstrom.install</literal> file that dune generates.
</para>

<programlisting>
{ stdenv, fetchFromGitHub, buildDunePackage, alcotest, result, bigstringaf }:

buildDunePackage rec {
@ -66,14 +65,14 @@ buildDunePackage rec {
</programlisting>

<para>
Here is a second example, this time using a source archive generated with
<literal>dune-release</literal>. It is a good idea to use this archive when
it is available as it will usually contain substituted variables such as a
<literal>%%VERSION%%</literal> field. This library does not depend on any
other OCaml library and no tests are run after building it.
</para>

<programlisting>
{ stdenv, fetchurl, buildDunePackage }:

buildDunePackage rec {
@ -95,5 +94,4 @@ buildDunePackage rec {
  };
}
</programlisting>

</section>

@ -307,19 +307,20 @@ packageOverrides = pkgs: {
</screen>
</para>
</section>

<section xml:id="sec-elm">
<title>Elm</title>

<para>
To update the Elm compiler, see
<filename>nixpkgs/pkgs/development/compilers/elm/README.md</filename>.
</para>

<para>
To package Elm applications,
<link xlink:href="https://github.com/hercules-ci/elm2nix#elm2nix">read about
elm2nix</link>.
</para>
</section>

<section xml:id="sec-shell-helpers">
<title>Interactive shell helpers</title>

@ -96,8 +96,8 @@
</programlisting>
<para>
The package <literal>xcbuild</literal> can be used to build projects that
really depend on Xcode. However, this replacement is not 100% compatible
with Xcode and can occasionally cause issues.
</para>
</listitem>
</itemizedlist>

@ -148,8 +148,8 @@ $ git add pkgs/development/libraries/libfoo/default.nix</screen>
<listitem>
<para>
You can use <command>nix-prefetch-url</command>
<replaceable>url</replaceable> to get the SHA-256 hash of source
distributions. Similar commands, such as
<command>nix-prefetch-git</command> and
<command>nix-prefetch-hg</command>, are available in the
<literal>nix-prefetch-scripts</literal> package.
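For instance (the URL shown is only illustrative; the command prints the
hash after downloading the file):
<screen>
$ nix-prefetch-url https://ftp.gnu.org/gnu/hello/hello-2.10.tar.gz</screen>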
@ -24,11 +24,13 @@
<para>
The high change rate of Nixpkgs makes any pull request that remains open for
too long subject to conflicts that will require extra work from the submitter
or the merger. Reviewing pull requests in a timely manner and being
responsive to the comments is the key to avoiding this issue. GitHub provides
sort filters that can be used to see the
<link
xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-desc">most
recently</link> and the
<link
xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-asc">least
recently</link> updated pull requests. We highly encourage looking at
<link xlink:href="https://github.com/NixOS/nixpkgs/pulls?q=is%3Apr+is%3Aopen+review%3Anone+status%3Asuccess+-label%3A%222.status%3A+work-in-progress%22+no%3Aproject+no%3Aassignee+no%3Amilestone">
@ -609,8 +611,8 @@ policy.
create an issue or post on
<link
xlink:href="https://discourse.nixos.org">Discourse</link> with
references of packages and modules they maintain so the maintainership can
be taken over by other contributors.
</para>
</section>
</chapter>

566
doc/stdenv.xml
@ -228,18 +228,17 @@ genericBuild
</para>

<para>
The extension of <envar>PATH</envar> with dependencies, alluded to above,
proceeds according to the relative platforms alone. The process is carried
out only for dependencies whose host platform matches the new derivation's
build platform, i.e. dependencies which run on the platform where the new
derivation will be built.
<footnote xml:id="footnote-stdenv-native-dependencies-in-path">
<para>
Currently, this means for native builds all dependencies are put on the
<envar>PATH</envar>. But in the future that may not be the case for the
sake of matching cross: the platforms would be assumed to be unique for
native and cross builds alike, so only the <varname>depsBuild*</varname> and
<varname>nativeBuildInputs</varname> would be added to the
<envar>PATH</envar>.
</para>
@ -252,9 +251,10 @@ genericBuild
<para>
The dependency is propagated when it forces some of its other-transitive
(non-immediate) downstream dependencies to also take it on as an immediate
dependency. Nix itself already takes a package's transitive dependencies
into account, but this propagation ensures nixpkgs-specific infrastructure
like setup hooks (mentioned above) is also run for the propagated
dependency.
</para>

<para>
@ -270,9 +270,9 @@ genericBuild
described by the current dependency's platform offsets. This results in a
sort of transitive closure of the dependency relation, with the offsets being
approximately summed when two dependency links are combined. We also prune
transitive dependencies whose combined offsets go out-of-bounds, which can
be viewed as a filter over that transitive closure removing dependencies
that are blatantly absurd.
</para>

<para>
@ -287,8 +287,8 @@ genericBuild
propagation logic.
</para>
</footnote>
They're confusing in very different ways so... hopefully if something
doesn't make sense in one presentation, it will in the other!
<programlisting>
let mapOffset(h, t, i) = i + (if i <= 0 then h else t - 1)

@ -324,31 +324,31 @@ let f(h, h + 1, i) = i + (if i <= 0 then h else (h + 1) - 1)
let f(h, h + 1, i) = i + (if i <= 0 then h else h)
let f(h, h + 1, i) = i + h
</programlisting>
This is where "sum-like" comes in from above: We can just sum all of the
host offsets to get the host offset of the transitive dependency. The target
offset of the transitive dependency is simply the host offset + 1, just as
it was with the dependencies composed to make this transitive one; it can be
ignored as it doesn't add any new information.
</para>

<para>
Because of the bounds checks, the uncommon cases are <literal>h =
t</literal> and <literal>h + 2 = t</literal>. In the former case, the
motivation for <function>mapOffset</function> is that since its host and
target platforms are the same, no transitive dependency of it should be able
to "discover" an offset greater than its reduced target offsets.
<function>mapOffset</function> effectively "squashes" all its transitive
dependencies' offsets so that none will ever be greater than the target
offset of the original <literal>h = t</literal> package. In the other case,
<literal>h + 1</literal> is skipped over between the host and target
offsets. Instead of squashing the offsets, we need to "rip" them apart so no
transitive dependencies' offset is that one.
</para>

<para>
Overall, the unifying theme here is that propagation shouldn't be
introducing transitive dependencies involving platforms the depending
package is unaware of. The offset bounds checking and definition of
<function>mapOffset</function> together ensure that this is the case.
Discovering a new offset is discovering a new platform, and since those
platforms weren't in the derivation "spec" of the needing package, they
@ -381,8 +381,8 @@ let f(h, h + 1, i) = i + h
Since these packages are able to be run at build-time, they are always
added to the <envar>PATH</envar>, as described above. But since these
packages are only guaranteed to be able to run then, they shouldn't
persist as run-time dependencies. This isn't currently enforced, but
could be in the future.
</para>
</listitem>
</varlistentry>
@ -396,10 +396,10 @@ let f(h, h + 1, i) = i + h
platform, and target platform is the new derivation's host platform. This
means a <literal>-1</literal> host offset and <literal>0</literal> target
offset from the new derivation's platforms. These are programs and
libraries used at build-time that, if they are a compiler or similar
tool, produce code to run at run-time—i.e. tools used to build the new
derivation. If the dependency doesn't care about the target platform
(i.e. isn't a compiler or similar tool), put it here, rather than in
<varname>depsBuildBuild</varname> or <varname>depsBuildTarget</varname>.
This could be called <varname>depsBuildHost</varname> but
<varname>nativeBuildInputs</varname> is used for historical continuity.
@ -407,8 +407,9 @@ let f(h, h + 1, i) = i + h
<para>
Since these packages are able to be run at build-time, they are added to
the <envar>PATH</envar>, as described above. But since these packages are
only guaranteed to be able to run then, they shouldn't persist as
run-time dependencies. This isn't currently enforced, but could be in the
future.
</para>
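<para>
A brief sketch of the common case (the package names are illustrative
only): build-time tools go in <varname>nativeBuildInputs</varname>, while
libraries linked into the output go in <varname>buildInputs</varname>:
</para>
<programlisting>
stdenv.mkDerivation {
  name = "example-package";
  # tools that must run on the build platform
  nativeBuildInputs = [ pkgconfig cmake ];
  # libraries for the host platform, found via pkg-config at build time
  buildInputs = [ openssl ];
  # ...
}
</programlisting>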
</listitem>
</varlistentry>
@ -421,33 +422,36 @@ let f(h, h + 1, i) = i + h
A list of dependencies whose host platform is the new derivation's build
platform, and target platform is the new derivation's target platform.
This means a <literal>-1</literal> host offset and <literal>1</literal>
target offset from the new derivation's platforms. These are programs
used at build time that produce code to run with code produced by the
depending package. Most commonly, these are tools used to build the
runtime or standard library that the currently-being-built compiler will
inject into any code it compiles. In many cases, the
currently-being-built compiler is itself employed for that task, but when
that compiler won't run (i.e. its build and host platform differ) this is
not possible. Other times, the compiler relies on some other tool, like
binutils, that is always built separately so that the dependency is
unconditional.
</para>
<para>
This is a somewhat confusing concept to wrap one’s head around, and for
good reason. As the only dependency type where the platform offsets are
not adjacent integers, it requires thinking of a bootstrapping stage
<emphasis>two</emphasis> away from the current one. It and its use-case
go hand in hand and are both considered poor form: try to not need this
sort of dependency, and try to avoid building standard libraries and
runtimes in the same derivation as the compiler produces code using them.
Instead strive to build those like a normal library, using the
newly-built compiler just as a normal library would. In short, do not use
this attribute unless you are packaging a compiler and are sure it is
needed.
</para>
<para>
Since these packages are able to run at build time, they are added to the
<envar>PATH</envar>, as described above. But since these packages are
only guaranteed to be able to run then, they shouldn't persist as
run-time dependencies. This isn't currently enforced, but could be in the
future.
</para>
</listitem>
</varlistentry>
@ -462,11 +466,11 @@ let f(h, h + 1, i) = i + h
and <literal>0</literal> target offset from the new derivation's host
platform. These are packages used at run-time to generate code also used
at run-time. In practice, this would usually be tools used by compilers
for macros or a metaprogramming system, or libraries used by the macros
or metaprogramming code itself. It's always preferable to use a
<varname>depsBuildBuild</varname> dependency in the derivation being
built over a <varname>depsHostHost</varname> on the tool doing the
building for this purpose.
</para>
</listitem>
</varlistentry>
@ -481,8 +485,8 @@ let f(h, h + 1, i) = i + h
<literal>1</literal> target offset from the new derivation's host
platform. This would be called <varname>depsHostTarget</varname> but for
historical continuity. If the dependency doesn't care about the target
platform (i.e. isn't a compiler or similar tool), put it here, rather
than in <varname>depsBuildBuild</varname>.
</para>
<para>
These are often programs and libraries used by the new derivation at
@ -664,10 +668,11 @@ passthru = {
<literal>hello.baz.value1</literal>. We don't specify any usage or schema
of <literal>passthru</literal> - it is meant for values that would be
useful outside the derivation in other parts of a Nix expression (e.g. in
other derivations). An example would be to convey some specific
dependency of your derivation which contains a program with plugins
support. Later, others who make derivations with plugins can use the
passed-through dependency to ensure that their plugin would be
binary-compatible with the built program.
</para>
</listitem>
</varlistentry>
@ -677,9 +682,9 @@ passthru = {
</term>
<listitem>
<para>
A script to be run by <filename>maintainers/scripts/update.nix</filename>
when the package is matched. It needs to be an executable file, either on
the file system:
<programlisting>
passthru.updateScript = ./update.sh;
</programlisting>
@ -695,16 +700,24 @@ passthru.updateScript = writeScript "update-zoom-us" ''
update-source-version zoom-us "$version"
'';
</programlisting>
The attribute can also contain a list, a script followed by arguments to
be passed to it:
<programlisting>
passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ];
</programlisting>
Note that the update scripts will be run in parallel by default; you
should avoid running <command>git commit</command> or any other commands
that cannot handle that.
</para>

<para>
For information about how to run the updates, execute
<cmdsynopsis>
<command>nix-shell</command>
<arg>
maintainers/scripts/update.nix
</arg>
</cmdsynopsis>.
</para>
</listitem>
</varlistentry>
@ -1178,8 +1191,8 @@ passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ]
By default, when cross compiling, the configure script has
<option>--build=...</option> and <option>--host=...</option> passed.
Packages can instead pass <literal>[ "build" "host" "target" ]</literal>
or a subset to control exactly which platform flags are passed.
Compilers and other tools can use this to also pass the target platform.
<footnote xml:id="footnote-stdenv-build-time-guessing-impurity">
<para>
Eventually these will be passed building natively as well, to improve
@ -1694,10 +1707,11 @@ installTargets = "install-bin install-doc";</programlisting>
</term>
<listitem>
<para>
A package can export a <link linkend="ssec-setup-hooks">setup
hook</link> by setting this variable. The setup hook, if defined, is
copied to <filename>$out/nix-support/setup-hook</filename>. Environment
variables are then substituted in it using
<function
linkend="fun-substituteAll">substituteAll</function>.
</para>
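<para>
A minimal sketch of exporting a hook from a package (the file name is
arbitrary):
</para>
<programlisting>
setupHook = ./setup-hook.sh;
</programlisting>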
</listitem>
@ -1812,8 +1826,8 @@ set debug-file-directory ~/.nix-profile/lib/debug
<listitem>
<para>
A list of dependencies used by the phase. This gets included in
<varname>nativeBuildInputs</varname> when
<varname>doInstallCheck</varname> is set.
</para>
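<para>
For example, a package might enable the phase and add a test runner (the
runner named here is purely illustrative):
</para>
<programlisting>
doInstallCheck = true;
installCheckInputs = [ python3Packages.pytest ];
</programlisting>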
</listitem>
</varlistentry>
@ -2160,10 +2174,11 @@ someVar=$(stripHash $name)
dependency derivation is already built just the same—depending is just
needing something to exist, and needing is idempotent. However, a dependency
specified twice will have its setup hook run twice, and that could easily
change the build environment (though a well-written setup hook will
therefore strive to be idempotent so this is in fact not observable). More
broadly, setup hooks are anti-modular in that multiple dependencies, whether
the same or different, should not interfere and yet their setup hooks may
well do so.
</para>

<para>
@ -2185,11 +2200,12 @@ someVar=$(stripHash $name)
Returning to the C compiler wrapper example, if the wrapper itself is an
<literal>n</literal> dependency, then it only wants to accumulate flags from
<literal>n + 1</literal> dependencies, as only those ones match the
compiler's target platform. The <envar>hostOffset</envar> variable is
defined with the current dependency's host offset, and
<envar>targetOffset</envar> with its target offset, before its setup hook is
sourced. Additionally, since most environment hooks don't care about the
target platform, that means the setup hook can append to the right bash
array by doing something like
<programlisting language="bash">
addEnvHooks "$hostOffset" myBashFunction
</programlisting>
@ -2204,24 +2220,22 @@ addEnvHooks "$hostOffset" myBashFunction
</para>

<para>
First, let’s cover some setup hooks that are part of Nixpkgs default
stdenv. This means that they are run for every package built using
<function>stdenv.mkDerivation</function>. Some of these are platform
specific, so they may run on Linux but not Darwin or vice-versa.
<variablelist>
<varlistentry>
<term>
<literal>move-docs.sh</literal>
</term>
<listitem>
<para>
This setup hook moves any installed documentation to the
<literal>/share</literal> subdirectory. This includes the man,
doc and info directories. This is needed for legacy programs that do not
know how to use the <literal>share</literal> subdirectory.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2229,11 +2243,11 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>compress-man-pages.sh</literal>
</term>
<listitem>
<para>
This setup hook compresses any man pages that have been installed. The
compression is done using the gzip program. This helps to reduce the
installed size of packages.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2241,12 +2255,11 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>strip.sh</literal>
</term>
<listitem>
<para>
This runs the strip command on installed binaries and libraries. This
removes unnecessary information like debug symbols when they are not
needed. This also helps to reduce the installed size of packages.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2254,15 +2267,14 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>patch-shebangs.sh</literal>
</term>
<listitem>
<para>
This setup hook patches installed scripts to use the full path to the
shebang interpreter. A shebang interpreter is the first commented line
of a script telling the operating system which program will run the
script (e.g. <literal>#!/bin/bash</literal>). In Nix, we want an exact
path to that interpreter to be used. This often replaces
<literal>/bin/sh</literal> with a path in the Nix store.
</para>
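<para>
As an illustration (the store path shown is only a placeholder), a script
that starts with
</para>
<programlisting>
#!/bin/bash
</programlisting>
<para>
would be rewritten to point at the interpreter in the Nix store, for
example something of the form
</para>
<programlisting>
#!/nix/store/...-bash/bin/bash
</programlisting>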
</listitem>
</varlistentry>
<varlistentry>
@ -2270,12 +2282,12 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>audit-tmpdir.sh</literal>
</term>
<listitem>
<para>
This verifies that no references are left from the installed binaries to
the directory used to build those binaries. This ensures that the
binaries do not need things outside the Nix store. This is currently
supported in Linux only.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2283,14 +2295,14 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>multiple-outputs.sh</literal>
</term>
<listitem>
<para>
This setup hook adds configure flags that tell packages to install files
into any one of the proper outputs listed in <literal>outputs</literal>.
This behavior can be turned off by setting
<literal>setOutputFlags</literal> to false in the derivation
environment. See <xref linkend="chap-multiple-output"/> for more
information.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2298,11 +2310,11 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>move-sbin.sh</literal>
</term>
<listitem>
<para>
This setup hook moves any binaries installed in the sbin subdirectory
into bin. In addition, a link is provided from sbin to bin for
compatibility.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2310,11 +2322,11 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>move-lib64.sh</literal>
</term>
<listitem>
<para>
This setup hook moves any libraries installed in the lib64 subdirectory
into lib. In addition, a link is provided from lib64 to lib for
compatibility.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2322,10 +2334,10 @@ addEnvHooks "$hostOffset" myBashFunction
<literal>set-source-date-epoch-to-latest.sh</literal>
</term>
<listitem>
<para>
This sets <literal>SOURCE_DATE_EPOCH</literal> to the modification time
of the most recent file.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2335,19 +2347,19 @@ addEnvHooks "$hostOffset" myBashFunction
<listitem>
<para>
The Bintools Wrapper wraps the binary utilities for a bunch of
miscellaneous purposes. These are GNU Binutils when targeting Linux,
and a mix of cctools and GNU binutils for Darwin. [The "Bintools" name
is supposed to be a compromise between "Binutils" and "cctools" not
denoting any specific implementation.] Specifically, the underlying
bintools package, and a C standard library (glibc or Darwin's libSystem,
just for the dynamic loader) are all fed in, and dependency finding,
hardening (see below), and purity checks for each are handled by the
Bintools Wrapper. Packages typically depend on the CC Wrapper, which in
turn (at run time) depends on the Bintools Wrapper.
</para>
<para>
The Bintools Wrapper was only just recently split off from CC Wrapper,
so the division of labor is still being worked out. For example, it
shouldn't care about the C standard library, but just take a
derivation with the dynamic loader (which happens to be the glibc on
linux). Dependency finding however is a task both wrappers will continue
@ -2357,11 +2369,12 @@ addEnvHooks "$hostOffset" myBashFunction
<varname>nativeBuildInputs</varname>) in environment variables. The
Bintools Wrapper's setup hook causes any <filename>lib</filename> and
<filename>lib64</filename> subdirectories to be added to
<envar>NIX_LDFLAGS</envar>. Since the CC Wrapper and the Bintools
Wrapper use the same strategy, most of the Bintools Wrapper code is
sparsely commented and refers to the CC Wrapper. But the CC Wrapper's
code, by contrast, has quite lengthy comments. The Bintools Wrapper
merely cites those, rather than repeating them, to avoid falling out of
sync.
</para>
<para>
A final task of the setup hook is defining a number of standard
@ -2370,8 +2383,8 @@ addEnvHooks "$hostOffset" myBashFunction
under the assumption that the Bintools Wrapper's binaries will be on the
path. Firstly, this helps poorly-written packages, e.g. ones that look
for just <command>gcc</command> when <envar>CC</envar> isn't defined yet
<command>clang</command> is to be used. Secondly, this helps packages
not get confused when cross-compiling, in which case multiple Bintools
Wrappers may simultaneously be in use.
<footnote xml:id="footnote-stdenv-per-platform-wrapper">
<para>
@ -2387,16 +2400,16 @@ addEnvHooks "$hostOffset" myBashFunction
Wrappers, properly disambiguating them.
</para>
<para>
A problem with this final task is that the Bintools Wrapper is honest
and defines <envar>LD</envar> as <command>ld</command>. Most packages,
however, firstly use the C compiler for linking, secondly use
<envar>LD</envar> anyways, defining it as the C compiler, and thirdly,
only so define <envar>LD</envar> when it is undefined as a fallback.
This triple-threat means the Bintools Wrapper will break those packages, as
LD is already defined as the actual linker which the package won't
override yet doesn't want to use. The workaround is to define, just for
the problematic package, <envar>LD</envar> as the C compiler. A good way
to do this would be <command>preConfigure = "LD=$CC"</command>.
</para>
</listitem>
</varlistentry>
@ -2406,13 +2419,13 @@ addEnvHooks "$hostOffset" myBashFunction
</term>
<listitem>
<para>
The CC Wrapper wraps a C toolchain for a bunch of miscellaneous
purposes. Specifically, a C compiler (GCC or Clang), wrapped binary
tools, and a C standard library (glibc or Darwin's libSystem, just for
the dynamic loader) are all fed in, and dependency finding, hardening
(see below), and purity checks for each are handled by the CC Wrapper.
Packages typically depend on the CC Wrapper, which in turn (at run-time)
depends on the Bintools Wrapper.
</para>
<para>
Dependency finding is undoubtedly the main task of the CC Wrapper. This
@ -2434,14 +2447,13 @@ addEnvHooks "$hostOffset" myBashFunction
</para>
</listitem>
</varlistentry>
</variablelist>
</para>

<para>
Here are some more packages that provide a setup hook. Since the list of
hooks is extensible, this is not an exhaustive list; as the mechanism is
only to be used as a last resort, it might cover most uses.
<variablelist>
<varlistentry>
<term>
@ -2499,11 +2511,11 @@ addEnvHooks "$hostOffset" myBashFunction
<listitem>
<para>
The <varname>autoreconfHook</varname> derivation adds
<varname>autoreconfPhase</varname>, which runs autoreconf, libtoolize
and automake, essentially preparing the configure script in
autotools-based builds. Most autotools-based packages come with the
configure script pre-generated, but this hook is necessary for a few
packages and when you need to patch the package’s configure scripts.
</para>
</listitem>
</varlistentry>
@ -2547,9 +2559,9 @@ addEnvHooks "$hostOffset" myBashFunction
</term>
<listitem>
<para>
Exports the <envar>GDK_PIXBUF_MODULE_FILE</envar> environment variable to
the builder. Add the librsvg package to <varname>buildInputs</varname> to
get svg support.
</para>
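<para>
For example, to get SVG support as described above (a sketch only):
</para>
<programlisting>
buildInputs = [ librsvg ];
</programlisting>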
</listitem>
</varlistentry>
@ -2594,21 +2606,20 @@ addEnvHooks "$hostOffset" myBashFunction
</para>
<para>
This is useful for programs that use <citerefentry>
<refentrytitle>dlopen</refentrytitle>
<manvolnum>3</manvolnum>
</citerefentry> to load libraries at runtime.
</para>
<para>
In certain situations you may want to run the main command
(<command>autoPatchelf</command>) of the setup hook on a file or a set
of directories instead of unconditionally patching all outputs. This can
be done by setting the <envar>dontAutoPatchelf</envar> environment
variable to a non-empty value.
</para>
<para>
The <command>autoPatchelf</command> command also recognizes a
<parameter class="command">--no-recurse</parameter> command line flag,
which prevents it from recursing into subdirectories.
</para>
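<para>
A typical use is to patch a prebuilt binary against libraries from Nixpkgs;
the package names below are only illustrative:
</para>
<programlisting>
stdenv.mkDerivation {
  name = "some-prebuilt-binary";
  # the hook scans the outputs and fixes up their RPATH entries
  nativeBuildInputs = [ autoPatchelfHook ];
  # libraries the prebuilt binary is expected to link against
  buildInputs = [ zlib openssl ];
  # ...
}
</programlisting>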
</listitem>
</varlistentry>
@ -2619,22 +2630,22 @@ addEnvHooks "$hostOffset" myBashFunction
<listitem>
<para>
This hook will make a build pause instead of stopping when a failure
happens. It prevents nix from cleaning up the build environment
immediately and allows the user to attach to a build environment using
the <command>cntr</command> command. Upon build error it will print
instructions on how to use <command>cntr</command>. Installing cntr and
running the command will provide shell access to the build sandbox of
the failed build. At <filename>/var/lib/cntr</filename> the sandboxed
filesystem is mounted. All commands and files of the system are still
accessible within the shell. To execute commands from the sandbox use
the cntr exec subcommand. Note that <command>cntr</command> also needs
to be executed on the machine that is doing the build, which might not
be the case when remote builders are enabled. <command>cntr</command> is
only supported on Linux-based platforms. To use it first add
<literal>cntr</literal> to your
<literal>environment.systemPackages</literal> on NixOS or alternatively
to the root user on non-NixOS systems. Then in the package that is
supposed to be inspected, add <literal>breakpointHook</literal> to
<literal>nativeBuildInputs</literal>.
<programlisting>
nativeBuildInputs = [ breakpointHook ];
@ -2649,16 +2660,15 @@ addEnvHooks "$hostOffset" myBashFunction
libiconv, libintl
</term>
<listitem>
<para>
A few libraries automatically add to <literal>NIX_LDFLAGS</literal>
their library, making their symbols automatically available to the
linker. This includes libiconv and libintl (gettext). This is done to
provide compatibility between GNU Linux, where libiconv and libintl are
bundled in, and other systems where that might not be the case.
Sometimes, this behavior is not desired. To disable this behavior, set
<literal>dontAddExtraLibs</literal>.
</para>
</listitem>
</varlistentry>
<varlistentry>
@ -2666,17 +2676,17 @@ addEnvHooks "$hostOffset" myBashFunction
cmake
</term>
<listitem>
<para>
Overrides the default configure phase to run the CMake command. By
default, we use the Make generator of CMake. In addition, dependencies
are added automatically to CMAKE_PREFIX_PATH so that packages are
correctly detected by CMake. Some additional flags are passed in to give
similar behavior to configure-based packages. You can disable this
hook’s behavior by setting configurePhase to a custom value, or by
setting dontUseCmakeConfigure. cmakeFlags controls flags passed only to
CMake. By default, parallel building is enabled as CMake supports
parallel building almost everywhere. When Ninja is also in use, CMake
will detect that and use the ninja generator.
</para>
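<para>
A small sketch (the flag shown is invented purely for illustration):
</para>
<programlisting>
nativeBuildInputs = [ cmake ];
cmakeFlags = [ "-DENABLE_EXAMPLES=OFF" ];
</programlisting>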
</listitem>
</varlistentry>
@ -2685,12 +2695,12 @@ addEnvHooks "$hostOffset" myBashFunction
xcbuildHook
</term>
<listitem>
<para>
Overrides the build and install phases to run the “xcbuild” command.
This hook is needed when a project only comes with build files for the
XCode build system. You can disable this behavior by setting buildPhase
and configurePhase to a custom value. xcbuildFlags controls flags passed
only to xcbuild.
</para>
</listitem>
</varlistentry>
@ -2699,13 +2709,13 @@ addEnvHooks "$hostOffset" myBashFunction
meson
</term>
<listitem>
<para>
Overrides the configure phase to run meson to generate Ninja files. You
can disable this behavior by setting configurePhase to a custom value,
or by setting dontUseMesonConfigure. To run these files, you should
accompany meson with ninja. mesonFlags controls only the flags passed to
meson. By default, parallel building is enabled as Meson supports
parallel building almost everywhere.
</para>
</listitem>
</varlistentry>
@@ -2714,11 +2724,11 @@ addEnvHooks "$hostOffset" myBashFunction
ninja
</term>
<listitem>
<para>
Overrides the build, install, and check phases to run ninja instead of
make. You can disable this behavior with dontUseNinjaBuild,
dontUseNinjaInstall, and dontUseNinjaCheck, respectively. Parallel
building is enabled by default in Ninja.
</para>
</listitem>
</varlistentry>
@@ -2727,9 +2737,9 @@ addEnvHooks "$hostOffset" myBashFunction
unzip
</term>
<listitem>
<para>
This setup hook will allow you to unzip .zip files specified in $src.
There are many similar packages like unrar, undmg, etc.
</para>
</listitem>
</varlistentry>
@@ -2738,11 +2748,11 @@ addEnvHooks "$hostOffset" myBashFunction
wafHook
</term>
<listitem>
<para>
Overrides the configure, build, and install phases. This will run the
"waf" script used by many projects. If waf doesn’t exist, it will copy
the version of waf available in Nixpkgs. wafFlags can be used to pass
flags to the waf script.
</para>
</listitem>
</varlistentry>
@@ -2751,14 +2761,14 @@ addEnvHooks "$hostOffset" myBashFunction
scons
</term>
<listitem>
<para>
Overrides the build, install, and check phases. This uses the scons
build system as a replacement for make. scons does not provide a
configure phase, so everything is managed at build and install time.
</para>
</listitem>
</varlistentry>
</variablelist>
</variablelist>
</para>
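<para>
As a rough illustration of the CMake-related attributes described above
(the package name, URL, hash, and flag below are hypothetical and not part
of this change), a derivation using the cmake setup hook might look like:
<programlisting>
{ stdenv, fetchurl, cmake, ninja }:

stdenv.mkDerivation rec {
  name = "example-cmake-package-${version}";   # hypothetical package
  version = "1.0";
  src = fetchurl {
    url = "https://example.org/example-${version}.tar.gz";               # hypothetical URL
    sha256 = "0000000000000000000000000000000000000000000000000000";     # placeholder hash
  };
  nativeBuildInputs = [ cmake ninja ];   # cmake hook; the Ninja generator is detected automatically
  cmakeFlags = [ "-DENABLE_FOO=ON" ];    # flags passed only to CMake
  # dontUseCmakeConfigure = true;        # uncomment to bypass the hook's configure phase
}
</programlisting>
</para>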
</section>
<section xml:id="sec-purity-in-nixpkgs">
@ -59,7 +59,7 @@ let
|
||||
stringLength sub substring tail;
|
||||
inherit (trivial) id const concat or and bitAnd bitOr bitXor bitNot
|
||||
boolToString mergeAttrs flip mapNullable inNixShell min max
|
||||
importJSON warn info nixpkgsVersion version mod compare
|
||||
importJSON warn info showWarnings nixpkgsVersion version mod compare
|
||||
splitByAndCompare functionArgs setFunctionArgs isFunction;
|
||||
inherit (fixedPoints) fix fix' converge extends composeExtensions
|
||||
makeExtensible makeExtensibleWithCustomName;
|
||||
|
@ -476,8 +476,22 @@ rec {
|
||||
optionSet to options of type submodule. FIXME: remove
|
||||
eventually. */
|
||||
fixupOptionType = loc: opt:
|
||||
if opt.type.getSubModules or null == null
|
||||
then opt // { type = opt.type or types.unspecified; }
|
||||
let
|
||||
options = opt.options or
|
||||
(throw "Option `${showOption loc'}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
|
||||
f = tp:
|
||||
let optionSetIn = type: (tp.name == type) && (tp.functor.wrapped.name == "optionSet");
|
||||
in
|
||||
if tp.name == "option set" || tp.name == "submodule" then
|
||||
throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
|
||||
else if optionSetIn "attrsOf" then types.attrsOf (types.submodule options)
|
||||
else if optionSetIn "loaOf" then types.loaOf (types.submodule options)
|
||||
else if optionSetIn "listOf" then types.listOf (types.submodule options)
|
||||
else if optionSetIn "nullOr" then types.nullOr (types.submodule options)
|
||||
else tp;
|
||||
in
|
||||
if opt.type.getSubModules or null == null
|
||||
then opt // { type = f (opt.type or types.unspecified); }
|
||||
else opt // { type = opt.type.substSubModules opt.options; options = []; };
|
||||
|
||||
|
||||
|
@ -48,6 +48,8 @@ rec {
|
||||
visible ? null,
|
||||
# Whether the option can be set only once
|
||||
readOnly ? null,
|
||||
# Deprecated, used by types.optionSet.
|
||||
options ? null
|
||||
} @ attrs:
|
||||
attrs // { _type = "option"; };
|
||||
|
||||
@ -141,7 +143,7 @@ rec {
|
||||
docOption = rec {
|
||||
loc = opt.loc;
|
||||
name = showOption opt.loc;
|
||||
description = opt.description or (throw "Option `${name}' has no description.");
|
||||
description = opt.description or (lib.warn "Option `${name}' has no description." "This option has no description.");
|
||||
declarations = filter (x: x != unknownModule) opt.declarations;
|
||||
internal = opt.internal or false;
|
||||
visible = opt.visible or true;
|
||||
|
@ -259,9 +259,10 @@ rec {
|
||||
# TODO: figure out a clever way to integrate location information from
|
||||
# something like __unsafeGetAttrPos.
|
||||
|
||||
warn = msg: builtins.trace "WARNING: ${msg}";
|
||||
warn = msg: builtins.trace "[1;31mwarning: ${msg}[0m";
|
||||
info = msg: builtins.trace "INFO: ${msg}";
|
||||
|
||||
showWarnings = warnings: res: lib.fold (w: x: warn w x) res warnings;
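# Illustration (not part of this change): showWarnings threads a list of
# warning strings through `warn`, tracing each one and returning the result
# unchanged, e.g. `lib.showWarnings [ "foo is deprecated" ] cfg` prints the
# warning and evaluates to `cfg`.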
|
||||
|
||||
## Function annotations
|
||||
|
||||
|
@ -469,8 +469,10 @@ rec {
|
||||
# Obsolete alternative to configOf. It takes its option
|
||||
# declarations from the ‘options’ attribute of containing option
|
||||
# declaration.
|
||||
optionSet = builtins.throw "types.optionSet is deprecated; use types.submodule instead" "optionSet";
|
||||
|
||||
optionSet = mkOptionType {
|
||||
name = builtins.trace "types.optionSet is deprecated; use types.submodule instead" "optionSet";
|
||||
description = "option set";
|
||||
};
|
||||
# Augment the given type with an additional type check function.
|
||||
addCheck = elemType: check: elemType // { check = x: elemType.check x && check x; };
|
||||
|
||||
|
@ -38,6 +38,15 @@
|
||||
See `./scripts/check-maintainer-github-handles.sh` for an example on how to work with this data.
|
||||
*/
|
||||
{
|
||||
"0x4A6F" = {
|
||||
email = "0x4A6F@shackspace.de";
|
||||
name = "Joachim Ernst";
|
||||
github = "0x4A6F";
|
||||
keys = [{
|
||||
longkeyid = "rsa8192/0x87027528B006D66D";
|
||||
fingerprint = "F466 A548 AD3F C1F1 8C88 4576 8702 7528 B006 D66D";
|
||||
}];
|
||||
};
|
||||
"1000101" = {
|
||||
email = "jan.hrnko@satoshilabs.com";
|
||||
github = "1000101";
|
||||
@ -2237,10 +2246,6 @@
|
||||
github = "jmettes";
|
||||
name = "Jonathan Mettes";
|
||||
};
|
||||
Jo = {
|
||||
email = "0x4A6F@shackspace.de";
|
||||
name = "Joachim Ernst";
|
||||
};
|
||||
joachifm = {
|
||||
email = "joachifm@fastmail.fm";
|
||||
github = "joachifm";
|
||||
@ -2643,6 +2648,11 @@
|
||||
github = "lihop";
|
||||
name = "Leroy Hopson";
|
||||
};
|
||||
lilyball = {
|
||||
email = "lily@sb.org";
|
||||
github = "lilyball";
|
||||
name = "Lily Ballard";
|
||||
};
|
||||
limeytexan = {
|
||||
email = "limeytexan@gmail.com";
|
||||
github = "limeytexan";
|
||||
|
@ -1,7 +1,7 @@
|
||||
# nix name, luarocks name, server, version/additionnal args
|
||||
ansicolors,
|
||||
argparse,
|
||||
basexx,
|
||||
cqueues
|
||||
dkjson
|
||||
fifo
|
||||
inspect
|
||||
@ -18,15 +18,15 @@ lua-term,
|
||||
luabitop,
|
||||
luaevent,
|
||||
luacheck
|
||||
luaffi,http://luarocks.org/dev,
|
||||
luaffi,,http://luarocks.org/dev,
|
||||
luuid,
|
||||
penlight,
|
||||
say,
|
||||
luv,
|
||||
luasystem,
|
||||
mediator_lua,http://luarocks.org/manifests/teto
|
||||
mpack,http://luarocks.org/manifests/teto
|
||||
nvim-client,http://luarocks.org/manifests/teto
|
||||
busted,http://luarocks.org/manifests/teto
|
||||
luassert,http://luarocks.org/manifests/teto
|
||||
coxpcall,https://luarocks.org/manifests/hisham,1.17.0-1
|
||||
mediator_lua,,http://luarocks.org/manifests/teto
|
||||
mpack,,http://luarocks.org/manifests/teto
|
||||
nvim-client,,http://luarocks.org/manifests/teto
|
||||
busted,,http://luarocks.org/manifests/teto
|
||||
luassert,,http://luarocks.org/manifests/teto
|
||||
coxpcall,,https://luarocks.org/manifests/hisham,1.17.0-1
|
||||
|
|
@ -74,17 +74,18 @@ FOOTER="
|
||||
|
||||
|
||||
function convert_pkg () {
|
||||
pkg="$1"
|
||||
nix_pkg_name="$1"
|
||||
lua_pkg_name="$2"
|
||||
server=""
|
||||
if [ ! -z "$2" ]; then
|
||||
server=" --server=$2"
|
||||
if [ ! -z "$3" ]; then
|
||||
server=" --server=$3"
|
||||
fi
|
||||
|
||||
version="${3:-}"
|
||||
|
||||
echo "looking at $pkg (version $version) from server [$server]" >&2
|
||||
cmd="luarocks nix $server $pkg $version"
|
||||
drv="$($cmd)"
|
||||
echo "looking at $lua_pkg_name (version $version) from server [$server]" >&2
|
||||
cmd="luarocks nix $server $lua_pkg_name $version"
|
||||
drv="$nix_pkg_name = $($cmd)"
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Failed to convert $pkg" >&2
|
||||
echo "$drv" >&2
|
||||
@ -98,12 +99,17 @@ echo "$HEADER" | tee "$TMP_FILE"
|
||||
|
||||
# list of packages with format
|
||||
# name,server,version
|
||||
while IFS=, read -r pkg_name server version
|
||||
while IFS=, read -r nix_pkg_name lua_pkg_name server version
|
||||
do
|
||||
if [ -z "$pkg_name" ]; then
|
||||
echo "Skipping empty package name" >&2
|
||||
if [ "${nix_pkg_name:0:1}" == "#" ]; then
|
||||
echo "Skipping comment ${nix_pkg_name}" >&2
|
||||
continue
|
||||
fi
|
||||
convert_pkg "$pkg_name" "$server" "$version"
|
||||
if [ -z "$lua_pkg_name" ]; then
|
||||
echo "Using nix_name as lua_pkg_name" >&2
|
||||
lua_pkg_name="$nix_pkg_name"
|
||||
fi
|
||||
convert_pkg "$nix_pkg_name" "$lua_pkg_name" "$server" "$version"
|
||||
done < "$CSV_FILE"
|
||||
|
||||
# close the set
|
||||
|
@ -4,7 +4,7 @@
|
||||
version="5.0"
|
||||
xml:id="ch-running">
|
||||
<title>Administration</title>
|
||||
<partintro>
|
||||
<partintro xml:id="ch-running-intro">
|
||||
<para>
|
||||
This chapter describes various aspects of managing a running NixOS system,
|
||||
such as how to use the <command>systemd</command> service manager.
|
||||
|
@ -4,7 +4,7 @@
|
||||
version="5.0"
|
||||
xml:id="ch-configuration">
|
||||
<title>Configuration</title>
|
||||
<partintro>
|
||||
<partintro xml:id="ch-configuration-intro">
|
||||
<para>
|
||||
This chapter describes how to configure various aspects of a NixOS machine
|
||||
through the configuration file
|
||||
|
@ -36,8 +36,25 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you are using WPA2 the <command>wpa_passphrase</command> tool might be
|
||||
useful to generate the <literal>wpa_supplicant.conf</literal>.
|
||||
If you are using WPA2 you can generate pskRaw key using
|
||||
<command>wpa_passphrase</command>:
|
||||
<screen>
|
||||
$ wpa_passphrase ESSID PSK
|
||||
network={
|
||||
ssid="echelon"
|
||||
#psk="abcdefgh"
|
||||
psk=dca6d6ed41f4ab5a984c9f55f6f66d4efdc720ebf66959810f4329bb391c5435
|
||||
}
|
||||
</screen>
|
||||
<programlisting>
|
||||
<xref linkend="opt-networking.wireless.networks"/> = {
|
||||
echelon = {
|
||||
pskRaw = "dca6d6ed41f4ab5a984c9f55f6f66d4efdc720ebf66959810f4329bb391c5435";
|
||||
};
|
||||
}
|
||||
</programlisting>
|
||||
or you can use it to directly generate the
|
||||
<literal>wpa_supplicant.conf</literal>:
|
||||
<screen>
|
||||
# wpa_passphrase ESSID PSK > /etc/wpa_supplicant.conf</screen>
|
||||
After you have edited the <literal>wpa_supplicant.conf</literal>, you need to
|
||||
|
@ -268,7 +268,10 @@ in rec {
|
||||
--stringparam id.warnings "1" \
|
||||
--nonet --output $dst/ \
|
||||
${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
|
||||
${manual-combined}/manual-combined.xml
|
||||
${manual-combined}/manual-combined.xml \
|
||||
|& tee xsltproc.out
|
||||
grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
|
||||
rm xsltproc.out
|
||||
|
||||
mkdir -p $dst/images/callouts
|
||||
cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
|
||||
@ -327,6 +330,7 @@ in rec {
|
||||
# Generate manpages.
|
||||
mkdir -p $out/share/man
|
||||
xsltproc --nonet \
|
||||
--maxdepth 6000 \
|
||||
--param man.output.in.separate.dir 1 \
|
||||
--param man.output.base.dir "'$out/share/man/'" \
|
||||
--param man.endnotes.are.numbered 0 \
|
||||
|
@ -4,7 +4,7 @@
|
||||
version="5.0"
|
||||
xml:id="ch-development">
|
||||
<title>Development</title>
|
||||
<partintro>
|
||||
<partintro xml:id="ch-development-intro">
|
||||
<para>
|
||||
This chapter describes how you can modify and extend NixOS.
|
||||
</para>
|
||||
|
@ -4,7 +4,7 @@
|
||||
version="5.0"
|
||||
xml:id="ch-installation">
|
||||
<title>Installation</title>
|
||||
<partintro>
|
||||
<partintro xml:id="ch-installation-intro">
|
||||
<para>
|
||||
This section describes how to obtain, install, and configure NixOS for
|
||||
first-time use.
|
||||
|
@ -377,6 +377,10 @@
|
||||
option can be set to <literal>true</literal> to automatically add them to
|
||||
the grub menu.
|
||||
</para>
|
||||
<para>
|
||||
If you need to configure networking for your machine the configuration
|
||||
options are described in <xref linkend="sec-networking"/>.
|
||||
</para>
|
||||
<para>
|
||||
Another critical option is <option>fileSystems</option>, specifying the
|
||||
file systems that need to be mounted by NixOS. However, you typically
|
||||
|
@ -456,8 +456,8 @@
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>
|
||||
Support for NixOS module system type <literal>types.optionSet</literal> and
|
||||
<literal>lib.mkOption</literal> argument <literal>options</literal> is removed.
|
||||
NixOS module system type <literal>types.optionSet</literal> and
|
||||
<literal>lib.mkOption</literal> argument <literal>options</literal> are deprecated.
|
||||
Use <literal>types.submodule</literal> instead.
|
||||
(<link xlink:href="https://github.com/NixOS/nixpkgs/pull/54637">#54637</link>)
|
||||
</para>
|
||||
@ -577,7 +577,7 @@
|
||||
but is still possible by setting <literal>zramSwap.swapDevices</literal> explicitly.
|
||||
</para>
|
||||
<para>
|
||||
Default algorithm for ZRAM swap was changed to <literal>zstd</literal>.
|
||||
ZRAM algorithm can be changed now.
|
||||
</para>
|
||||
<para>
|
||||
Changes to ZRAM algorithm are applied during <literal>nixos-rebuild switch</literal>,
|
||||
@ -666,6 +666,10 @@
|
||||
Some OpenCL and VA-API applications might also break
|
||||
(Beignet seems to provide OpenCL support with
|
||||
<literal>modesetting</literal> driver, too).
|
||||
Kernel mode setting API does not support backlight control,
|
||||
so <literal>xbacklight</literal> tool will not work;
|
||||
backlight level can be controlled directly via <literal>/sys/</literal>
|
||||
or with <literal>brightnessctl</literal>.
|
||||
Users who need this functionality more than multi-output XRandR are advised
|
||||
to add `intel` to `videoDrivers` and report an issue (or provide additional
|
||||
details in an existing one)
|
||||
@ -677,6 +681,9 @@
|
||||
This may break some older applications that still rely on those symbols.
|
||||
An upgrade guide can be found <link xlink:href="https://www.open-mpi.org/faq/?category=mpi-removed">here</link>.
|
||||
</para>
|
||||
<para>
|
||||
The nginx package now relies on OpenSSL 1.1 and supports TLS 1.3 by default. You can set the protocols used by the nginx service using <xref linkend="opt-services.nginx.sslProtocols"/>.
|
||||
</para>
|
||||
</listitem>
|
||||
<listitem>
|
||||
<para>
|
||||
|
@ -51,7 +51,17 @@
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para />
|
||||
<para>
|
||||
The <option>documentation</option> module gained an option named
|
||||
<option>documentation.nixos.includeAllModules</option> which makes the generated
|
||||
<citerefentry><refentrytitle>configuration.nix</refentrytitle>
|
||||
<manvolnum>5</manvolnum></citerefentry> manual page include all options from all NixOS modules
|
||||
included in a given <literal>configuration.nix</literal> configuration file. Currently, it is
|
||||
set to <literal>false</literal> by default as enabling it frequently prevents evaluation. But
|
||||
the plan is to eventually have it set to <literal>true</literal> by default. Please set it to
|
||||
<literal>true</literal> now in your <literal>configuration.nix</literal> and fix all the bugs
|
||||
it uncovers.
|
||||
</para>
|
||||
</listitem>
|
||||
</itemizedlist>
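<para>
For illustration, opting in to the option described above from a
<literal>configuration.nix</literal> (a minimal sketch, not text added by
this change) looks like:
<programlisting>
{
  documentation.nixos.includeAllModules = true;
}
</programlisting>
</para>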
|
||||
</section>
|
||||
|
@ -51,7 +51,7 @@ in rec {
|
||||
# system configuration.
|
||||
inherit (lib.evalModules {
|
||||
inherit prefix check;
|
||||
modules = modules ++ extraModules ++ baseModules ++ [ pkgsModule ];
|
||||
modules = baseModules ++ extraModules ++ [ pkgsModule ] ++ modules;
|
||||
args = extraArgs;
|
||||
specialArgs =
|
||||
{ modulesPath = builtins.toString ../modules; } // specialArgs;
|
||||
@ -60,7 +60,7 @@ in rec {
|
||||
# These are the extra arguments passed to every module. In
|
||||
# particular, Nixpkgs is passed through the "pkgs" argument.
|
||||
extraArgs = extraArgs_ // {
|
||||
inherit modules baseModules;
|
||||
inherit baseModules extraModules modules;
|
||||
};
|
||||
|
||||
inherit (config._module.args) pkgs;
|
||||
|
@ -91,13 +91,13 @@ in
|
||||
};
|
||||
|
||||
algorithm = mkOption {
|
||||
default = "zstd";
|
||||
example = "lzo";
|
||||
default = "lzo";
|
||||
example = "lz4";
|
||||
type = with types; either (enum [ "lzo" "lz4" "zstd" ]) str;
|
||||
description = ''
|
||||
Compression algorithm. <literal>lzo</literal> has good compression,
|
||||
but is slow. <literal>lz4</literal> has bad compression, but is fast.
|
||||
<literal>zstd</literal> is both good compression and fast.
|
||||
<literal>zstd</literal> is both good compression and fast, but requires newer kernel.
|
||||
You can check what other algorithms are supported by your zram device with
|
||||
<programlisting>cat /sys/class/block/zram*/comp_algorithm</programlisting>
|
||||
'';
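# Illustration (not part of this change): a configuration that opts in to
# zstd compression despite the lzo default would set, for example:
#   zramSwap.enable = true;
#   zramSwap.algorithm = "zstd";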
|
||||
|
@ -138,7 +138,18 @@ fi
|
||||
# Ask the user to set a root password, but only if the passwd command
|
||||
# exists (i.e. when mutable user accounts are enabled).
|
||||
if [[ -z $noRootPasswd ]] && [ -t 0 ]; then
|
||||
nixos-enter --root "$mountPoint" -c '[[ -e /nix/var/nix/profiles/system/sw/bin/passwd ]] && echo "setting root password..." && /nix/var/nix/profiles/system/sw/bin/passwd'
|
||||
if nixos-enter --root "$mountPoint" -c 'test -e /nix/var/nix/profiles/system/sw/bin/passwd'; then
|
||||
set +e
|
||||
nixos-enter --root "$mountPoint" -c 'echo "setting root password..." && /nix/var/nix/profiles/system/sw/bin/passwd'
|
||||
exit_code=$?
|
||||
set -e
|
||||
|
||||
if [[ $exit_code != 0 ]]; then
|
||||
echo "Setting a root password failed with the above printed error."
|
||||
echo "You can set the root password manually by executing \`nixos-enter --root ${mountPoint@Q}\` and then running \`passwd\` in the shell of the new system."
|
||||
exit $exit_code
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "installation finished!"
|
||||
|
@ -57,7 +57,5 @@ with lib;
|
||||
|
||||
# Enable the OpenSSH daemon.
|
||||
# services.openssh.enable = true;
|
||||
|
||||
system.stateVersion = mkDefault "18.03";
|
||||
'';
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
{ config, lib, pkgs, baseModules, ... }:
|
||||
{ config, lib, pkgs, baseModules, extraModules, modules, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
@ -6,6 +6,8 @@ let
|
||||
|
||||
cfg = config.documentation;
|
||||
|
||||
manualModules = baseModules ++ optionals cfg.nixos.includeAllModules (extraModules ++ modules);
|
||||
|
||||
/* For the purpose of generating docs, evaluate options with each derivation
|
||||
in `pkgs` (recursively) replaced by a fake with path "\${pkgs.attribute.path}".
|
||||
It isn't perfect, but it seems to cover a vast majority of use cases.
|
||||
@ -18,7 +20,7 @@ let
|
||||
options =
|
||||
let
|
||||
scrubbedEval = evalModules {
|
||||
modules = [ { nixpkgs.localSystem = config.nixpkgs.localSystem; } ] ++ baseModules;
|
||||
modules = [ { nixpkgs.localSystem = config.nixpkgs.localSystem; } ] ++ manualModules;
|
||||
args = (config._module.args) // { modules = [ ]; };
|
||||
specialArgs = { pkgs = scrubDerivations "pkgs" pkgs; };
|
||||
};
|
||||
@ -146,6 +148,17 @@ in
|
||||
'';
|
||||
};
|
||||
|
||||
nixos.includeAllModules = mkOption {
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = ''
|
||||
Whether the generated NixOS's documentation should include documentation for all
|
||||
the options from all the NixOS modules included in the current
|
||||
<literal>configuration.nix</literal>. Disabling this will make the manual
|
||||
generator to ignore options defined outside of <literal>baseModules</literal>.
|
||||
'';
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
};
|
||||
|
@ -182,6 +182,7 @@
|
||||
./services/audio/mpd.nix
|
||||
./services/audio/mopidy.nix
|
||||
./services/audio/slimserver.nix
|
||||
./services/audio/snapserver.nix
|
||||
./services/audio/squeezelite.nix
|
||||
./services/audio/ympd.nix
|
||||
./services/backup/bacula.nix
|
||||
@ -189,6 +190,7 @@
|
||||
./services/backup/duplicati.nix
|
||||
./services/backup/crashplan.nix
|
||||
./services/backup/crashplan-small-business.nix
|
||||
./services/backup/duplicity.nix
|
||||
./services/backup/mysql-backup.nix
|
||||
./services/backup/postgresql-backup.nix
|
||||
./services/backup/restic.nix
|
||||
@ -338,6 +340,7 @@
|
||||
./services/logging/syslog-ng.nix
|
||||
./services/logging/syslogd.nix
|
||||
./services/mail/clamsmtp.nix
|
||||
./services/mail/davmail.nix
|
||||
./services/mail/dkimproxy-out.nix
|
||||
./services/mail/dovecot.nix
|
||||
./services/mail/dspam.nix
|
||||
@ -425,7 +428,7 @@
|
||||
./services/misc/parsoid.nix
|
||||
./services/misc/phd.nix
|
||||
./services/misc/plex.nix
|
||||
./services/misc/plexpy.nix
|
||||
./services/misc/tautulli.nix
|
||||
./services/misc/pykms.nix
|
||||
./services/misc/radarr.nix
|
||||
./services/misc/redmine.nix
|
||||
|
@ -102,7 +102,7 @@ in
|
||||
# Emacs term mode doesn't support xterm title escape sequence (\e]0;)
|
||||
PS1="\n\[\033[$PROMPT_COLOR\][\u@\h:\w]\\$\[\033[0m\] "
|
||||
else
|
||||
PS1="\n\[\033[$PROMPT_COLOR\][\[\e]0;\u@\h: \w\a\]\u@\h:\w]\$\[\033[0m\] "
|
||||
PS1="\n\[\033[$PROMPT_COLOR\][\[\e]0;\u@\h: \w\a\]\u@\h:\w]\\$\[\033[0m\] "
|
||||
fi
|
||||
if test "$TERM" = "xterm"; then
|
||||
PS1="\[\033]2;\h:\u:\w\007\]$PS1"
|
||||
|
@ -85,11 +85,13 @@ in
|
||||
# SSH agent protocol doesn't support changing TTYs, so bind the agent
|
||||
# to every new TTY.
|
||||
${pkgs.gnupg}/bin/gpg-connect-agent --quiet updatestartuptty /bye > /dev/null
|
||||
'');
|
||||
|
||||
environment.extraInit = mkIf cfg.agent.enableSSHSupport ''
|
||||
if [ -z "$SSH_AUTH_SOCK" ]; then
|
||||
export SSH_AUTH_SOCK=$(${pkgs.gnupg}/bin/gpgconf --list-dirs agent-ssh-socket)
|
||||
fi
|
||||
'');
|
||||
'';
|
||||
|
||||
assertions = [
|
||||
{ assertion = cfg.agent.enableSSHSupport -> !config.programs.ssh.startAgent;
|
||||
|
@ -186,6 +186,9 @@ with lib;
|
||||
# parsoid
|
||||
(mkRemovedOptionModule [ "services" "parsoid" "interwikis" ] [ "services" "parsoid" "wikis" ])
|
||||
|
||||
# plexpy / tautulli
|
||||
(mkRenamedOptionModule [ "services" "plexpy" ] [ "services" "tautulli" ])
|
||||
|
||||
# piwik was renamed to matomo
|
||||
(mkRenamedOptionModule [ "services" "piwik" "enable" ] [ "services" "matomo" "enable" ])
|
||||
(mkRenamedOptionModule [ "services" "piwik" "webServerUser" ] [ "services" "matomo" "webServerUser" ])
|
||||
|
nixos/modules/services/audio/snapserver.nix (new file)
@@ -0,0 +1,217 @@
|
||||
{ config, lib, pkgs, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
|
||||
package = "snapcast";
|
||||
name = "snapserver";
|
||||
|
||||
cfg = config.services.snapserver;
|
||||
|
||||
# Using types.nullOr to inherit upstream defaults.
|
||||
sampleFormat = mkOption {
|
||||
type = with types; nullOr str;
|
||||
default = null;
|
||||
description = ''
|
||||
Default sample format.
|
||||
'';
|
||||
example = "48000:16:2";
|
||||
};
|
||||
|
||||
codec = mkOption {
|
||||
type = with types; nullOr str;
|
||||
default = null;
|
||||
description = ''
|
||||
Default audio compression method.
|
||||
'';
|
||||
example = "flac";
|
||||
};
|
||||
|
||||
streamToOption = name: opt:
|
||||
let
|
||||
os = val:
|
||||
optionalString (val != null) "${val}";
|
||||
os' = prefixx: val:
|
||||
optionalString (val != null) (prefixx + "${val}");
|
||||
flatten = key: value:
|
||||
"&${key}=${value}";
|
||||
in
|
||||
"-s ${opt.type}://" + os opt.location + "?" + os' "name=" name
|
||||
+ concatStrings (mapAttrsToList flatten opt.query);
|
||||
|
||||
optionalNull = val: ret:
|
||||
optional (val != null) ret;
|
||||
|
||||
optionString = concatStringsSep " " (mapAttrsToList streamToOption cfg.streams
|
||||
++ ["-p ${toString cfg.port}"]
|
||||
++ ["--controlPort ${toString cfg.controlPort}"]
|
||||
++ optionalNull cfg.sampleFormat "--sampleFormat ${cfg.sampleFormat}"
|
||||
++ optionalNull cfg.codec "-c ${cfg.codec}"
|
||||
++ optionalNull cfg.streamBuffer "--streamBuffer ${cfg.streamBuffer}"
|
||||
++ optionalNull cfg.buffer "-b ${cfg.buffer}"
|
||||
++ optional cfg.sendToMuted "--sendToMuted");
|
||||
|
||||
in {
|
||||
|
||||
###### interface
|
||||
|
||||
options = {
|
||||
|
||||
services.snapserver = {
|
||||
|
||||
enable = mkOption {
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = ''
|
||||
Whether to enable snapserver.
|
||||
'';
|
||||
};
|
||||
|
||||
port = mkOption {
|
||||
type = types.port;
|
||||
default = 1704;
|
||||
description = ''
|
||||
The port that snapclients can connect to.
|
||||
'';
|
||||
};
|
||||
|
||||
controlPort = mkOption {
|
||||
type = types.port;
|
||||
default = 1705;
|
||||
description = ''
|
||||
The port for control connections (JSON-RPC).
|
||||
'';
|
||||
};
|
||||
|
||||
openFirewall = mkOption {
|
||||
type = types.bool;
|
||||
default = true;
|
||||
description = ''
|
||||
Whether to automatically open the specified ports in the firewall.
|
||||
'';
|
||||
};
|
||||
|
||||
inherit sampleFormat;
|
||||
inherit codec;
|
||||
|
||||
streams = mkOption {
|
||||
type = with types; attrsOf (submodule {
|
||||
options = {
|
||||
location = mkOption {
|
||||
type = types.path;
|
||||
description = ''
|
||||
The location of the pipe.
|
||||
'';
|
||||
};
|
||||
type = mkOption {
|
||||
type = types.enum [ "pipe" "file" "process" "spotify" "airplay" ];
|
||||
default = "pipe";
|
||||
description = ''
|
||||
The type of input stream.
|
||||
'';
|
||||
};
|
||||
query = mkOption {
|
||||
type = attrsOf str;
|
||||
default = {};
|
||||
description = ''
|
||||
Key-value pairs that convey additional parameters about a stream.
|
||||
'';
|
||||
example = literalExample ''
|
||||
# for type == "pipe":
|
||||
{
|
||||
mode = "listen";
|
||||
};
|
||||
# for type == "process":
|
||||
{
|
||||
params = "--param1 --param2";
|
||||
logStderr = "true";
|
||||
};
|
||||
'';
|
||||
};
|
||||
inherit sampleFormat;
|
||||
inherit codec;
|
||||
};
|
||||
});
|
||||
default = { default = {}; };
|
||||
description = ''
|
||||
The definition for an input source.
|
||||
'';
|
||||
example = literalExample ''
|
||||
{
|
||||
mpd = {
|
||||
type = "pipe";
|
||||
location = "/run/snapserver/mpd";
|
||||
sampleFormat = "48000:16:2";
|
||||
codec = "pcm";
|
||||
};
|
||||
};
|
||||
'';
|
||||
};
|
||||
|
||||
streamBuffer = mkOption {
|
||||
type = with types; nullOr int;
|
||||
default = null;
|
||||
description = ''
|
||||
Stream read (input) buffer in ms.
|
||||
'';
|
||||
example = 20;
|
||||
};
|
||||
|
||||
buffer = mkOption {
|
||||
type = with types; nullOr int;
|
||||
default = null;
|
||||
description = ''
|
||||
Network buffer in ms.
|
||||
'';
|
||||
example = 1000;
|
||||
};
|
||||
|
||||
sendToMuted = mkOption {
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = ''
|
||||
Send audio to muted clients.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
|
||||
###### implementation
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
|
||||
systemd.services.snapserver = {
|
||||
after = [ "network.target" ];
|
||||
description = "Snapserver";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
before = [ "mpd.service" "mopidy.service" ];
|
||||
|
||||
serviceConfig = {
|
||||
DynamicUser = true;
|
||||
ExecStart = "${pkgs.snapcast}/bin/snapserver --daemon ${optionString}";
|
||||
Type = "forking";
|
||||
LimitRTPRIO = 50;
|
||||
LimitRTTIME = "infinity";
|
||||
NoNewPrivileges = true;
|
||||
PIDFile = "/run/${name}/pid";
|
||||
ProtectKernelTunables = true;
|
||||
ProtectControlGroups = true;
|
||||
ProtectKernelModules = true;
|
||||
RestrictAddressFamilies = "AF_INET AF_INET6 AF_UNIX";
|
||||
RestrictNamespaces = true;
|
||||
RuntimeDirectory = name;
|
||||
StateDirectory = name;
|
||||
};
|
||||
};
|
||||
|
||||
networking.firewall.allowedTCPPorts = optionals cfg.openFirewall [ cfg.port cfg.controlPort ];
|
||||
};
|
||||
|
||||
meta = {
|
||||
maintainers = with maintainers; [ tobim ];
|
||||
};
|
||||
|
||||
}
|
nixos/modules/services/backup/duplicity.nix (new file)
@@ -0,0 +1,141 @@
|
||||
{ config, lib, pkgs, ...}:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
cfg = config.services.duplicity;
|
||||
|
||||
stateDirectory = "/var/lib/duplicity";
|
||||
|
||||
localTarget = if hasPrefix "file://" cfg.targetUrl
|
||||
then removePrefix "file://" cfg.targetUrl else null;
|
||||
|
||||
in {
|
||||
options.services.duplicity = {
|
||||
enable = mkEnableOption "backups with duplicity";
|
||||
|
||||
root = mkOption {
|
||||
type = types.path;
|
||||
default = "/";
|
||||
description = ''
|
||||
Root directory to backup.
|
||||
'';
|
||||
};
|
||||
|
||||
include = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
example = [ "/home" ];
|
||||
description = ''
|
||||
List of paths to include into the backups. See the FILE SELECTION
|
||||
section in <citerefentry><refentrytitle>duplicity</refentrytitle>
|
||||
<manvolnum>1</manvolnum></citerefentry> for details on the syntax.
|
||||
'';
|
||||
};
|
||||
|
||||
exclude = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = ''
|
||||
List of paths to exclude from backups. See the FILE SELECTION section in
|
||||
<citerefentry><refentrytitle>duplicity</refentrytitle>
|
||||
<manvolnum>1</manvolnum></citerefentry> for details on the syntax.
|
||||
'';
|
||||
};
|
||||
|
||||
targetUrl = mkOption {
|
||||
type = types.str;
|
||||
example = "s3://host:port/prefix";
|
||||
description = ''
|
||||
Target url to backup to. See the URL FORMAT section in
|
||||
<citerefentry><refentrytitle>duplicity</refentrytitle>
|
||||
<manvolnum>1</manvolnum></citerefentry> for supported urls.
|
||||
'';
|
||||
};
|
||||
|
||||
secretFile = mkOption {
|
||||
type = types.nullOr types.path;
|
||||
default = null;
|
||||
description = ''
|
||||
Path of a file containing secrets (gpg passphrase, access key...) in
|
||||
the format of EnvironmentFile as described by
|
||||
<citerefentry><refentrytitle>systemd.exec</refentrytitle>
|
||||
<manvolnum>5</manvolnum></citerefentry>. For example:
|
||||
<programlisting>
|
||||
PASSPHRASE=<replaceable>...</replaceable>
|
||||
AWS_ACCESS_KEY_ID=<replaceable>...</replaceable>
|
||||
AWS_SECRET_ACCESS_KEY=<replaceable>...</replaceable>
|
||||
</programlisting>
|
||||
'';
|
||||
};
|
||||
|
||||
frequency = mkOption {
|
||||
type = types.nullOr types.str;
|
||||
default = "daily";
|
||||
description = ''
|
||||
Run duplicity with the given frequency (see
|
||||
<citerefentry><refentrytitle>systemd.time</refentrytitle>
|
||||
<manvolnum>7</manvolnum></citerefentry> for the format).
|
||||
If null, do not run automatically.
|
||||
'';
|
||||
};
|
||||
|
||||
extraFlags = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
example = [ "--full-if-older-than" "1M" ];
|
||||
description = ''
|
||||
Extra command-line flags passed to duplicity. See
|
||||
<citerefentry><refentrytitle>duplicity</refentrytitle>
|
||||
<manvolnum>1</manvolnum></citerefentry>.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
systemd = {
|
||||
services.duplicity = {
|
||||
description = "backup files with duplicity";
|
||||
|
||||
environment.HOME = stateDirectory;
|
||||
|
||||
serviceConfig = {
|
||||
ExecStart = ''
|
||||
${pkgs.duplicity}/bin/duplicity ${escapeShellArgs (
|
||||
[
|
||||
cfg.root
|
||||
cfg.targetUrl
|
||||
"--archive-dir" stateDirectory
|
||||
]
|
||||
++ concatMap (p: [ "--include" p ]) cfg.include
|
||||
++ concatMap (p: [ "--exclude" p ]) cfg.exclude
|
||||
++ cfg.extraFlags)}
|
||||
'';
|
||||
PrivateTmp = true;
|
||||
ProtectSystem = "strict";
|
||||
ProtectHome = "read-only";
|
||||
StateDirectory = baseNameOf stateDirectory;
|
||||
} // optionalAttrs (localTarget != null) {
|
||||
ReadWritePaths = localTarget;
|
||||
} // optionalAttrs (cfg.secretFile != null) {
|
||||
EnvironmentFile = cfg.secretFile;
|
||||
};
|
||||
} // optionalAttrs (cfg.frequency != null) {
|
||||
startAt = cfg.frequency;
|
||||
};
|
||||
|
||||
tmpfiles.rules = optional (localTarget != null) "d ${localTarget} 0700 root root -";
|
||||
};
|
||||
|
||||
assertions = singleton {
|
||||
# Duplicity will fail if the last file selection option is an include. It
|
||||
# is not always possible to detect but this simple case can be caught.
|
||||
assertion = cfg.include != [] -> cfg.exclude != [] || cfg.extraFlags != [];
|
||||
message = ''
|
||||
Duplicity will fail if you only specify included paths ("Because the
|
||||
default is to include all files, the expression is redundant. Exiting
|
||||
because this probably isn't what you meant.")
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
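# Illustration (not part of this change): a minimal configuration using the
# duplicity module defined above might look like this; the target URL and
# paths below are hypothetical.
{
  services.duplicity = {
    enable = true;
    root = "/home";                                   # back up /home only
    targetUrl = "file:///var/backup/duplicity";       # local file:// target
    secretFile = "/etc/nixos/secrets/duplicity.env";  # PASSPHRASE=... as described above
    extraFlags = [ "--full-if-older-than" "1M" ];
  };
}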
|
@ -38,6 +38,18 @@ in {
|
||||
type = types.int;
|
||||
};
|
||||
|
||||
reconcileMode = mkOption {
|
||||
description = ''
|
||||
Controls the addon manager reconciliation mode for the DNS addon.
|
||||
|
||||
Setting reconcile mode to EnsureExists makes it possible to tailor DNS behavior by editing the coredns ConfigMap.
|
||||
|
||||
See: <link xlink:href="https://github.com/kubernetes/kubernetes/blob/master/cluster/addons/addon-manager/README.md"/>.
|
||||
'';
|
||||
default = "Reconcile";
|
||||
type = types.enum [ "Reconcile" "EnsureExists" ];
|
||||
};
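# Illustration (not part of this change): a cluster that wants to hand-edit
# the generated coredns ConfigMap would set, for example:
#   services.kubernetes.addons.dns.reconcileMode = "EnsureExists";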
|
||||
|
||||
coredns = mkOption {
|
||||
description = "Docker image to seed for the CoreDNS container.";
|
||||
type = types.attrs;
|
||||
@ -131,7 +143,7 @@ in {
|
||||
kind = "ConfigMap";
|
||||
metadata = {
|
||||
labels = {
|
||||
"addonmanager.kubernetes.io/mode" = "Reconcile";
|
||||
"addonmanager.kubernetes.io/mode" = cfg.reconcileMode;
|
||||
"k8s-app" = "kube-dns";
|
||||
"kubernetes.io/cluster-service" = "true";
|
||||
};
|
||||
@ -162,7 +174,7 @@ in {
|
||||
kind = "Deployment";
|
||||
metadata = {
|
||||
labels = {
|
||||
"addonmanager.kubernetes.io/mode" = "Reconcile";
|
||||
"addonmanager.kubernetes.io/mode" = cfg.reconcileMode;
|
||||
"k8s-app" = "kube-dns";
|
||||
"kubernetes.io/cluster-service" = "true";
|
||||
"kubernetes.io/name" = "CoreDNS";
|
||||
|
@ -350,7 +350,7 @@ in
|
||||
listenPeerUrls = mkDefault ["https://0.0.0.0:2380"];
|
||||
advertiseClientUrls = mkDefault ["https://${top.masterAddress}:2379"];
|
||||
initialCluster = mkDefault ["${top.masterAddress}=https://${top.masterAddress}:2380"];
|
||||
name = top.masterAddress;
|
||||
name = mkDefault top.masterAddress;
|
||||
initialAdvertisePeerUrls = mkDefault ["https://${top.masterAddress}:2380"];
|
||||
};
|
||||
|
||||
|
@ -131,7 +131,7 @@ in
|
||||
${optionalString (cfg.tlsCertFile!=null)
|
||||
"--tls-cert-file=${cfg.tlsCertFile}"} \
|
||||
${optionalString (cfg.tlsKeyFile!=null)
|
||||
"--tls-key-file=${cfg.tlsKeyFile}"} \
|
||||
"--tls-private-key-file=${cfg.tlsKeyFile}"} \
|
||||
${optionalString (elem "RBAC" top.apiserver.authorizationMode)
|
||||
"--use-service-account-credentials"} \
|
||||
${optionalString (cfg.verbosity != null) "--v=${toString cfg.verbosity}"} \
|
||||
|
@ -10,7 +10,7 @@ let
|
||||
kind = "Config";
|
||||
clusters = [{
|
||||
name = "local";
|
||||
cluster.certificate-authority = cfg.caFile;
|
||||
cluster.certificate-authority = conf.caFile or cfg.caFile;
|
||||
cluster.server = conf.server;
|
||||
}];
|
||||
users = [{
|
||||
|
@ -146,7 +146,7 @@ in
|
||||
chown -R "${cfg.user}:${cfg.group}" "${cfg.dataDir}"
|
||||
'';
|
||||
serviceConfig.ExecStart =
|
||||
"${openldap.out}/libexec/slapd -d ${cfg.logLevel} " +
|
||||
"${openldap.out}/libexec/slapd -d '${cfg.logLevel}' " +
|
||||
"-u '${cfg.user}' -g '${cfg.group}' " +
|
||||
"-h '${concatStringsSep " " cfg.urlList}' " +
|
||||
"${configOpts}";
|
||||
|
@ -47,6 +47,8 @@ let
|
||||
${cfg.levels}
|
||||
'';
|
||||
|
||||
thinkfan = pkgs.thinkfan.override { smartSupport = cfg.smartSupport; };
|
||||
|
||||
in {
|
||||
|
||||
options = {
|
||||
@ -61,6 +63,15 @@ in {
|
||||
'';
|
||||
};
|
||||
|
||||
smartSupport = mkOption {
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = ''
|
||||
Whether to build thinkfan with SMART support to read temperatures
|
||||
directly from hard disks.
|
||||
'';
|
||||
};
|
||||
|
||||
sensors = mkOption {
|
||||
type = types.lines;
|
||||
default = ''
|
||||
@ -77,7 +88,7 @@ in {
|
||||
Which may be provided by any hwmon drivers (keyword
|
||||
hwmon)
|
||||
|
||||
S.M.A.R.T. (since 0.9 and requires the USE_ATASMART compilation flag)
|
||||
S.M.A.R.T. (requires smartSupport to be enabled)
|
||||
Which reads the temperature directly from the hard
|
||||
disk using libatasmart (keyword atasmart)
|
||||
|
||||
@ -125,18 +136,17 @@ in {
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
|
||||
environment.systemPackages = [ pkgs.thinkfan ];
|
||||
environment.systemPackages = [ thinkfan ];
|
||||
|
||||
systemd.services.thinkfan = {
|
||||
description = "Thinkfan";
|
||||
after = [ "basic.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
path = [ pkgs.thinkfan ];
|
||||
serviceConfig.ExecStart = "${pkgs.thinkfan}/bin/thinkfan -n -c ${configFile}";
|
||||
path = [ thinkfan ];
|
||||
serviceConfig.ExecStart = "${thinkfan}/bin/thinkfan -n -c ${configFile}";
|
||||
};
|
||||
|
||||
boot.extraModprobeConfig = "options thinkpad_acpi experimental=1 fan_control=1";
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
nixos/modules/services/mail/davmail.nix (new file)
@@ -0,0 +1,91 @@
|
||||
{ config, lib, pkgs, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
|
||||
cfg = config.services.davmail;
|
||||
|
||||
configType = with types;
|
||||
either (either (attrsOf configType) str) (either int bool) // {
|
||||
description = "davmail config type (str, int, bool or attribute set thereof)";
|
||||
};
|
||||
|
||||
toStr = val: if isBool val then boolToString val else toString val;
|
||||
|
||||
linesForAttrs = attrs: concatMap (name: let value = attrs.${name}; in
|
||||
if isAttrs value
|
||||
then map (line: name + "." + line) (linesForAttrs value)
|
||||
else [ "${name}=${toStr value}" ]
|
||||
) (attrNames attrs);
|
||||
|
||||
configFile = pkgs.writeText "davmail.properties" (concatStringsSep "\n" (linesForAttrs cfg.config));
|
||||
|
||||
in
|
||||
|
||||
{
|
||||
options.services.davmail = {
|
||||
enable = mkEnableOption "davmail, an MS Exchange gateway";
|
||||
|
||||
url = mkOption {
|
||||
type = types.str;
|
||||
description = "Outlook Web Access URL to access the exchange server, i.e. the base webmail URL.";
|
||||
example = "https://outlook.office365.com/EWS/Exchange.asmx";
|
||||
};
|
||||
|
||||
config = mkOption {
|
||||
type = configType;
|
||||
default = {};
|
||||
description = ''
|
||||
Davmail configuration. Refer to
|
||||
<link xlink:href="http://davmail.sourceforge.net/serversetup.html"/>
|
||||
and <link xlink:href="http://davmail.sourceforge.net/advanced.html"/>
|
||||
for details on supported values.
|
||||
'';
|
||||
example = literalExample ''
|
||||
{
|
||||
davmail.allowRemote = true;
|
||||
davmail.imapPort = 55555;
|
||||
davmail.bindAddress = "10.0.1.2";
|
||||
davmail.smtpSaveInSent = true;
|
||||
davmail.folderSizeLimit = 10;
|
||||
davmail.caldavAutoSchedule = false;
|
||||
log4j.logger.rootLogger = "DEBUG";
|
||||
}
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
|
||||
services.davmail.config.davmail = mapAttrs (name: mkDefault) {
|
||||
server = true;
|
||||
disableUpdateCheck = true;
|
||||
logFilePath = "/var/log/davmail/davmail.log";
|
||||
logFileSize = "1MB";
|
||||
mode = "auto";
|
||||
url = cfg.url;
|
||||
caldavPort = 1080;
|
||||
imapPort = 1143;
|
||||
ldapPort = 1389;
|
||||
popPort = 1110;
|
||||
smtpPort = 1025;
|
||||
};
|
||||
|
||||
systemd.services.davmail = {
|
||||
description = "DavMail POP/IMAP/SMTP Exchange Gateway";
|
||||
after = [ "network.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
ExecStart = "${pkgs.davmail}/bin/davmail ${configFile}";
|
||||
Restart = "on-failure";
|
||||
DynamicUser = "yes";
|
||||
LogsDirectory = "davmail";
|
||||
};
|
||||
};
|
||||
|
||||
environment.systemPackages = [ pkgs.davmail ];
|
||||
};
|
||||
}
|
@ -2,10 +2,10 @@
|
||||
|
||||
with lib;
|
||||
let
|
||||
gunicorn = pkgs.pythonPackages.gunicorn;
|
||||
gunicorn = pkgs.python3Packages.gunicorn;
|
||||
bepasty = pkgs.bepasty;
|
||||
gevent = pkgs.pythonPackages.gevent;
|
||||
python = pkgs.pythonPackages.python;
|
||||
gevent = pkgs.python3Packages.gevent;
|
||||
python = pkgs.python3Packages.python;
|
||||
cfg = config.services.bepasty;
|
||||
user = "bepasty";
|
||||
group = "bepasty";
|
||||
|
@ -234,16 +234,39 @@ in
|
||||
|
||||
environment.systemPackages = [ cfg.package ];
|
||||
|
||||
# create symlinks for the basic directory layout the redmine package expects
|
||||
systemd.tmpfiles.rules = [
|
||||
"d '${cfg.stateDir}' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/cache' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/config' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/files' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/log' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/plugins' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/public' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/public/plugin_assets' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/public/themes' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
"d '${cfg.stateDir}/tmp' 0750 ${cfg.user} ${cfg.group} - -"
|
||||
|
||||
"d /run/redmine - - - - -"
|
||||
"d /run/redmine/public - - - - -"
|
||||
"L+ /run/redmine/config - - - - ${cfg.stateDir}/config"
|
||||
"L+ /run/redmine/files - - - - ${cfg.stateDir}/files"
|
||||
"L+ /run/redmine/log - - - - ${cfg.stateDir}/log"
|
||||
"L+ /run/redmine/plugins - - - - ${cfg.stateDir}/plugins"
|
||||
"L+ /run/redmine/public/plugin_assets - - - - ${cfg.stateDir}/public/plugin_assets"
|
||||
"L+ /run/redmine/public/themes - - - - ${cfg.stateDir}/public/themes"
|
||||
"L+ /run/redmine/tmp - - - - ${cfg.stateDir}/tmp"
|
||||
];
|
||||
|
||||
systemd.services.redmine = {
|
||||
after = [ "network.target" (if cfg.database.type == "mysql2" then "mysql.service" else "postgresql.service") ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
environment.HOME = "${cfg.package}/share/redmine";
|
||||
environment.RAILS_ENV = "production";
|
||||
environment.RAILS_CACHE = "${cfg.stateDir}/cache";
|
||||
environment.REDMINE_LANG = "en";
|
||||
environment.SCHEMA = "${cfg.stateDir}/cache/schema.db";
|
||||
path = with pkgs; [
|
||||
imagemagickBig
|
||||
imagemagick
|
||||
bazaar
|
||||
cvs
|
||||
darcs
|
||||
@ -252,28 +275,16 @@ in
|
||||
subversion
|
||||
];
|
||||
preStart = ''
|
||||
# ensure cache directory exists for db:migrate command
|
||||
mkdir -p "${cfg.stateDir}/cache"
|
||||
|
||||
# create the basic directory layout the redmine package expects
|
||||
mkdir -p /run/redmine/public
|
||||
|
||||
for i in config files log plugins tmp; do
|
||||
mkdir -p "${cfg.stateDir}/$i"
|
||||
ln -fs "${cfg.stateDir}/$i" /run/redmine/
|
||||
done
|
||||
|
||||
for i in plugin_assets themes; do
|
||||
mkdir -p "${cfg.stateDir}/public/$i"
|
||||
ln -fs "${cfg.stateDir}/public/$i" /run/redmine/public/
|
||||
done
|
||||
|
||||
rm -rf "${cfg.stateDir}/plugins/"*
|
||||
rm -rf "${cfg.stateDir}/public/themes/"*
|
||||
|
||||
# start with a fresh config directory
|
||||
# the config directory is copied instead of linked as some mutable data is stored in there
|
||||
rm -rf "${cfg.stateDir}/config/"*
|
||||
find "${cfg.stateDir}/config" ! -name "secret_token.rb" -type f -exec rm -f {} +
|
||||
cp -r ${cfg.package}/share/redmine/config.dist/* "${cfg.stateDir}/config/"
|
||||
|
||||
chmod -R u+w "${cfg.stateDir}/config"
|
||||
|
||||
# link in the application configuration
|
||||
ln -fs ${configurationYml} "${cfg.stateDir}/config/configuration.yml"
|
||||
|
||||
@ -282,7 +293,6 @@ in
|
||||
|
||||
|
||||
# link in all user specified themes
|
||||
rm -rf "${cfg.stateDir}/public/themes/"*
|
||||
for theme in ${concatStringsSep " " (mapAttrsToList unpackTheme cfg.themes)}; do
|
||||
ln -fs $theme/* "${cfg.stateDir}/public/themes"
|
||||
done
|
||||
@ -292,16 +302,11 @@ in
|
||||
|
||||
|
||||
# link in all user specified plugins
|
||||
rm -rf "${cfg.stateDir}/plugins/"*
|
||||
for plugin in ${concatStringsSep " " (mapAttrsToList unpackPlugin cfg.plugins)}; do
|
||||
ln -fs $plugin/* "${cfg.stateDir}/plugins/''${plugin##*-redmine-plugin-}"
|
||||
done
|
||||
|
||||
|
||||
# ensure correct permissions for most files
|
||||
chmod -R ug+rwX,o-rwx+x "${cfg.stateDir}/"
|
||||
|
||||
|
||||
# handle database.passwordFile & permissions
|
||||
DBPASS=$(head -n1 ${cfg.database.passwordFile})
|
||||
cp -f ${databaseYml} "${cfg.stateDir}/config/database.yml"
|
||||
@ -315,25 +320,13 @@ in
|
||||
chmod 440 "${cfg.stateDir}/config/initializers/secret_token.rb"
|
||||
fi
|
||||
|
||||
|
||||
# ensure everything is owned by ${cfg.user}
|
||||
chown -R ${cfg.user}:${cfg.group} "${cfg.stateDir}"
|
||||
|
||||
|
||||
# execute redmine required commands prior to starting the application
|
||||
# NOTE: su required in case using mysql socket authentication
|
||||
/run/wrappers/bin/su -s ${pkgs.bash}/bin/bash -m -l redmine -c '${bundle} exec rake db:migrate'
|
||||
/run/wrappers/bin/su -s ${pkgs.bash}/bin/bash -m -l redmine -c '${bundle} exec rake redmine:plugins:migrate'
|
||||
/run/wrappers/bin/su -s ${pkgs.bash}/bin/bash -m -l redmine -c '${bundle} exec rake redmine:load_default_data'
|
||||
|
||||
|
||||
# log files don't exist until after first command has been executed
|
||||
# correct ownership of files generated by calling exec rake ...
|
||||
chown -R ${cfg.user}:${cfg.group} "${cfg.stateDir}/log"
|
||||
${bundle} exec rake db:migrate
|
||||
${bundle} exec rake redmine:plugins:migrate
|
||||
${bundle} exec rake redmine:load_default_data
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
PermissionsStartOnly = true; # preStart must be run as root
|
||||
Type = "simple";
|
||||
User = cfg.user;
|
||||
Group = cfg.group;
|
||||
@ -348,7 +341,6 @@ in
|
||||
{ name = "redmine";
|
||||
group = cfg.group;
|
||||
home = cfg.stateDir;
|
||||
createHome = true;
|
||||
uid = config.ids.uids.redmine;
|
||||
});
|
||||
|
||||
|
@ -3,73 +3,69 @@
|
||||
with lib;
|
||||
|
||||
let
|
||||
cfg = config.services.plexpy;
|
||||
cfg = config.services.tautulli;
|
||||
in
|
||||
{
|
||||
options = {
|
||||
services.plexpy = {
|
||||
enable = mkEnableOption "PlexPy Plex Monitor";
|
||||
services.tautulli = {
|
||||
enable = mkEnableOption "Tautulli Plex Monitor";
|
||||
|
||||
dataDir = mkOption {
|
||||
type = types.str;
|
||||
default = "/var/lib/plexpy";
|
||||
description = "The directory where PlexPy stores its data files.";
|
||||
description = "The directory where Tautulli stores its data files.";
|
||||
};
|
||||
|
||||
configFile = mkOption {
|
||||
type = types.str;
|
||||
default = "/var/lib/plexpy/config.ini";
|
||||
description = "The location of PlexPy's config file.";
|
||||
description = "The location of Tautulli's config file.";
|
||||
};
|
||||
|
||||
port = mkOption {
|
||||
type = types.int;
|
||||
default = 8181;
|
||||
description = "TCP port where PlexPy listens.";
|
||||
description = "TCP port where Tautulli listens.";
|
||||
};
|
||||
|
||||
user = mkOption {
|
||||
type = types.str;
|
||||
default = "plexpy";
|
||||
description = "User account under which PlexPy runs.";
|
||||
description = "User account under which Tautulli runs.";
|
||||
};
|
||||
|
||||
group = mkOption {
|
||||
type = types.str;
|
||||
default = "nogroup";
|
||||
description = "Group under which PlexPy runs.";
|
||||
description = "Group under which Tautulli runs.";
|
||||
};
|
||||
|
||||
package = mkOption {
|
||||
type = types.package;
|
||||
default = pkgs.plexpy;
|
||||
defaultText = "pkgs.plexpy";
|
||||
default = pkgs.tautulli;
|
||||
defaultText = "pkgs.tautulli";
|
||||
description = ''
|
||||
The PlexPy package to use.
|
||||
The Tautulli package to use.
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
systemd.services.plexpy = {
|
||||
description = "PlexPy Plex Monitor";
|
||||
systemd.tmpfiles.rules = [
|
||||
"d '${cfg.dataDir}' - ${cfg.user} ${cfg.group} - -"
|
||||
];
|
||||
|
||||
systemd.services.tautulli = {
|
||||
description = "Tautulli Plex Monitor";
|
||||
after = [ "network.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
preStart = ''
|
||||
test -d "${cfg.dataDir}" || {
|
||||
echo "Creating initial PlexPy data directory in \"${cfg.dataDir}\"."
|
||||
mkdir -p "${cfg.dataDir}"
|
||||
chown ${cfg.user}:${cfg.group} "${cfg.dataDir}"
|
||||
}
|
||||
'';
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
User = cfg.user;
|
||||
Group = cfg.group;
|
||||
PermissionsStartOnly = "true";
|
||||
GuessMainPID = "false";
|
||||
ExecStart = "${cfg.package}/bin/plexpy --datadir ${cfg.dataDir} --config ${cfg.configFile} --port ${toString cfg.port} --pidfile ${cfg.dataDir}/plexpy.pid --nolaunch";
|
||||
ExecStart = "${cfg.package}/bin/tautulli --datadir ${cfg.dataDir} --config ${cfg.configFile} --port ${toString cfg.port} --pidfile ${cfg.dataDir}/tautulli.pid --nolaunch";
|
||||
Restart = "on-failure";
|
||||
};
|
||||
};
|
@ -50,7 +50,7 @@ let
|
||||
ZM_DB_TYPE=mysql
|
||||
ZM_DB_HOST=${cfg.database.host}
|
||||
ZM_DB_NAME=${cfg.database.name}
|
||||
ZM_DB_USER=${cfg.database.username}
|
||||
ZM_DB_USER=${if cfg.database.createLocally then user else cfg.database.username}
|
||||
ZM_DB_PASS=${cfg.database.password}
|
||||
|
||||
# Web
|
||||
@ -205,12 +205,12 @@ in {
|
||||
|
||||
mysql = lib.mkIf cfg.database.createLocally {
|
||||
ensureDatabases = [ cfg.database.name ];
|
||||
initialDatabases = [{
|
||||
inherit (cfg.database) name; schema = "${pkg}/share/zoneminder/db/zm_create.sql";
|
||||
}];
|
||||
ensureUsers = [{
|
||||
name = cfg.database.username;
|
||||
ensurePermissions = { "${cfg.database.name}.*" = "ALL PRIVILEGES"; };
|
||||
initialDatabases = [
|
||||
{ inherit (cfg.database) name; schema = "${pkg}/share/zoneminder/db/zm_create.sql"; }
|
||||
];
|
||||
}];
|
||||
};
|
||||
|
||||
@ -275,14 +275,14 @@ in {
|
||||
};
|
||||
|
||||
phpfpm = lib.mkIf useNginx {
|
||||
phpOptions = ''
|
||||
date.timezone = "${config.time.timeZone}"
|
||||
|
||||
${lib.concatStringsSep "\n" (map (e:
|
||||
"extension=${e.pkg}/lib/php/extensions/${e.name}.so") phpExtensions)}
|
||||
'';
|
||||
pools.zoneminder = {
|
||||
listen = socket;
|
||||
phpOptions = ''
|
||||
date.timezone = "${config.time.timeZone}"
|
||||
|
||||
${lib.concatStringsSep "\n" (map (e:
|
||||
"extension=${e.pkg}/lib/php/extensions/${e.name}.so") phpExtensions)}
|
||||
'';
|
||||
extraConfig = ''
|
||||
user = ${user}
|
||||
group = ${group}
|
||||
|
@ -260,7 +260,7 @@ in {
|
||||
path = [ ];
|
||||
script = ''
|
||||
export DD_API_KEY=$(head -n 1 ${cfg.apiKeyFile})
|
||||
${pkgs.datadog-trace-agent}/bin/trace-agent -config /etc/datadog-agent/datadog.yaml
|
||||
${datadogPkg}/bin/trace-agent -config /etc/datadog-agent/datadog.yaml
|
||||
'';
|
||||
});
|
||||
|
||||
|
@ -16,6 +16,7 @@ in
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
boot.kernelModules = [ "hdapsd" ];
|
||||
services.udev.packages = hdapsd;
|
||||
systemd.packages = hdapsd;
|
||||
};
|
||||
|
@ -17,7 +17,6 @@ let
|
||||
'';
|
||||
|
||||
mosquittoConf = pkgs.writeText "mosquitto.conf" ''
|
||||
pid_file /run/mosquitto/pid
|
||||
acl_file ${aclFile}
|
||||
persistence true
|
||||
allow_anonymous ${boolToString cfg.allowAnonymous}
|
||||
@ -196,15 +195,15 @@ in
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network.target" ];
|
||||
serviceConfig = {
|
||||
Type = "forking";
|
||||
Type = "notify";
|
||||
NotifyAccess = "main";
|
||||
User = "mosquitto";
|
||||
Group = "mosquitto";
|
||||
RuntimeDirectory = "mosquitto";
|
||||
WorkingDirectory = cfg.dataDir;
|
||||
Restart = "on-failure";
|
||||
ExecStart = "${pkgs.mosquitto}/bin/mosquitto -c ${mosquittoConf} -d";
|
||||
ExecStart = "${pkgs.mosquitto}/bin/mosquitto -c ${mosquittoConf}";
|
||||
ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID";
|
||||
PIDFile = "/run/mosquitto/pid";
|
||||
};
|
||||
preStart = ''
|
||||
rm -f ${cfg.dataDir}/passwd
|
||||
@ -214,7 +213,7 @@ in
|
||||
if c.hashedPassword != null then
|
||||
"echo '${n}:${c.hashedPassword}' >> ${cfg.dataDir}/passwd"
|
||||
else optionalString (c.password != null)
|
||||
"${pkgs.mosquitto}/bin/mosquitto_passwd -b ${cfg.dataDir}/passwd ${n} ${c.password}"
|
||||
"${pkgs.mosquitto}/bin/mosquitto_passwd -b ${cfg.dataDir}/passwd ${n} '${c.password}'"
|
||||
) cfg.users);
|
||||
};
|
||||
|
||||
|
@ -65,9 +65,12 @@ in {
|
||||
after = [ "network-online.target" "keys.target" ];
|
||||
wants = [ "keys.target" ];
|
||||
path = with pkgs; [ kmod iproute iptables utillinux ];
|
||||
environment.STRONGSWAN_CONF = pkgs.writeTextFile {
|
||||
name = "strongswan.conf";
|
||||
text = cfg.strongswan.extraConfig;
|
||||
environment = {
|
||||
STRONGSWAN_CONF = pkgs.writeTextFile {
|
||||
name = "strongswan.conf";
|
||||
text = cfg.strongswan.extraConfig;
|
||||
};
|
||||
SWANCTL_DIR = "/etc/swanctl";
|
||||
};
|
||||
restartTriggers = [ config.environment.etc."swanctl/swanctl.conf".source ];
|
||||
serviceConfig = {
|
||||
|
@ -546,26 +546,26 @@ in {

config = mkIf cfg.enable {
services.phpfpm.poolConfigs = mkIf (cfg.pool == "${poolName}") {
"${poolName}" = ''
listen = "${phpfpmSocketName}"
listen.owner = nginx
listen.group = nginx
listen.mode = 0600
user = icingaweb2
pm = dynamic
pm.max_children = 75
pm.start_servers = 2
pm.min_spare_servers = 2
pm.max_spare_servers = 10
'';
"${poolName}" = {
listen = phpfpmSocketName;
phpOptions = ''
extension = ${pkgs.phpPackages.imagick}/lib/php/extensions/imagick.so
date.timezone = "${cfg.timezone}"
'';
extraConfig = ''
listen.owner = nginx
listen.group = nginx
listen.mode = 0600
user = icingaweb2
pm = dynamic
pm.max_children = 75
pm.start_servers = 2
pm.min_spare_servers = 2
pm.max_spare_servers = 10
'';
};
};

services.phpfpm.phpOptions = mkIf (cfg.pool == "${poolName}")
''
extension = ${pkgs.phpPackages.imagick}/lib/php/extensions/imagick.so
date.timezone = "${cfg.timezone}"
'';

systemd.services."phpfpm-${poolName}".serviceConfig.ReadWritePaths = [ "/etc/icingaweb2" ];

services.nginx = {

@ -427,19 +427,19 @@ in {
priority = 210;
extraConfig = "return 301 $scheme://$host/remote.php/dav;";
};
"~ ^/(?:build|tests|config|lib|3rdparty|templates|data)/" = {
"~ ^\\/(?:build|tests|config|lib|3rdparty|templates|data)\\/" = {
priority = 300;
extraConfig = "deny all;";
};
"~ ^/(?:\\.|autotest|occ|issue|indie|db_|console)" = {
"~ ^\\/(?:\\.|autotest|occ|issue|indie|db_|console)" = {
priority = 300;
extraConfig = "deny all;";
};
"~ ^/(?:index|remote|public|cron|core/ajax/update|status|ocs/v[12]|updater/.+|ocs-provider/.+)\\.php(?:$|/)" = {
"~ ^\\/(?:index|remote|public|cron|core/ajax\\/update|status|ocs\\/v[12]|updater\\/.+|ocs-provider\\/.+|ocm-provider\\/.+)\\.php(?:$|\\/)" = {
priority = 500;
extraConfig = ''
include ${config.services.nginx.package}/conf/fastcgi.conf;
fastcgi_split_path_info ^(.+\.php)(/.*)$;
fastcgi_split_path_info ^(.+\.php)(\\/.*)$;
fastcgi_param PATH_INFO $fastcgi_path_info;
fastcgi_param HTTPS ${if cfg.https then "on" else "off"};
fastcgi_param modHeadersAvailable true;

@ -450,7 +450,7 @@ in {
fastcgi_read_timeout 120s;
'';
};
"~ ^/(?:updater|ocs-provider|ocm-provider)(?:$|\/)".extraConfig = ''
"~ ^\\/(?:updater|ocs-provider|ocm-provider)(?:$|\\/)".extraConfig = ''
try_files $uri/ =404;
index index.php;
'';
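Both pools above move from one free-form configuration string to a structured attribute set. A minimal sketch of the new shape, with an illustrative pool name and values rather than the module's own:

    {
      "example-pool" = {
        listen = "/run/phpfpm/example-pool.sock";
        phpOptions = ''
          date.timezone = "UTC"
        '';
        extraConfig = ''
          user = nginx
          pm = dynamic
          pm.max_children = 75
        '';
      };
    }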
@ -179,34 +179,35 @@ in
config = mkIf cfg.enable {

services.phpfpm.poolConfigs = {
"${poolName}" = ''
listen = "${phpfpmSocketName}";
listen.owner = nginx
listen.group = nginx
listen.mode = 0600
user = ${cfg.user}
group = ${cfg.group}
pm = dynamic
pm.max_children = 75
pm.start_servers = 10
pm.min_spare_servers = 5
pm.max_spare_servers = 20
pm.max_requests = 500
catch_workers_output = 1
'';
"${poolName}" = {
listen = phpfpmSocketName;
phpOptions = ''
date.timezone = "CET"

${optionalString (!isNull cfg.email.server) ''
SMTP = ${cfg.email.server}
smtp_port = ${toString cfg.email.port}
auth_username = ${cfg.email.login}
auth_password = ${cfg.email.password}
''}
'';
extraConfig = ''
listen.owner = nginx
listen.group = nginx
listen.mode = 0600
user = ${cfg.user}
group = ${cfg.group}
pm = dynamic
pm.max_children = 75
pm.start_servers = 10
pm.min_spare_servers = 5
pm.max_spare_servers = 20
pm.max_requests = 500
catch_workers_output = 1
'';
};
};

services.phpfpm.phpOptions = ''
date.timezone = "CET"

${optionalString (!isNull cfg.email.server) ''
SMTP = ${cfg.email.server}
smtp_port = ${toString cfg.email.port}
auth_username = ${cfg.email.login}
auth_password = ${cfg.email.password}
''}
'';

services.nginx.enable = true;
services.nginx.virtualHosts."${cfg.virtualHost.serverName}" = {
listen = [ { addr = cfg.virtualHost.listenHost; port = cfg.virtualHost.listenPort; } ];
@ -121,6 +121,7 @@ in
environment.YOUTRACK_JVM_OPTS = "${extraAttr}";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
path = with pkgs; [ unixtools.hostname ];
serviceConfig = {
Type = "simple";
User = "youtrack";

@ -194,11 +194,12 @@ let
then filter (x: x.ssl) defaultListen
else defaultListen;

listenString = { addr, port, ssl, ... }:
listenString = { addr, port, ssl, extraParameters ? [], ... }:
"listen ${addr}:${toString port} "
+ optionalString ssl "ssl "
+ optionalString (ssl && vhost.http2) "http2 "
+ optionalString vhost.http2 "http2 "
+ optionalString vhost.default "default_server "
+ optionalString (extraParameters != []) (concatStringsSep " " extraParameters)
+ ";";

redirectListen = filter (x: !x.ssl) defaultListen;

@ -491,8 +492,8 @@ in

sslProtocols = mkOption {
type = types.str;
default = "TLSv1.2";
example = "TLSv1 TLSv1.1 TLSv1.2";
default = "TLSv1.2 TLSv1.3";
example = "TLSv1 TLSv1.1 TLSv1.2 TLSv1.3";
description = "Allowed TLS protocol versions.";
};

@ -31,6 +31,7 @@ with lib;
addr = mkOption { type = str; description = "IP address."; };
port = mkOption { type = int; description = "Port number."; default = 80; };
ssl = mkOption { type = bool; description = "Enable SSL."; default = false; };
extraParameters = mkOption { type = listOf str; description = "Extra parameters of this listen directive."; default = []; example = [ "reuseport" "deferred" ]; };
}; });
default = [];
example = [
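The new extraParameters field is concatenated verbatim into the rendered listen directive, so per-listener flags can now be passed through. An illustrative vhost entry (the host name and address are assumptions, the flags mirror the option's own example):

    services.nginx.virtualHosts."example.org".listen = [
      { addr = "0.0.0.0"; port = 443; ssl = true; extraParameters = [ "reuseport" "deferred" ]; }
    ];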
@ -56,9 +56,6 @@ in

export XDG_MENU_PREFIX=mate-

# Find the mouse
export XCURSOR_PATH=~/.icons:${config.system.path}/share/icons

# Let caja find extensions
export CAJA_EXTENSION_DIRS=$CAJA_EXTENSION_DIRS''${CAJA_EXTENSION_DIRS:+:}${config.system.path}/lib/caja/extensions-2.0

@ -78,9 +75,6 @@ in
# Add mate-control-center paths to some XDG variables because its schemas are needed by mate-settings-daemon, and mate-settings-daemon is a dependency for mate-control-center (that is, they are mutually recursive)
${addToXDGDirs pkgs.mate.mate-control-center}

# Update user dirs as described in http://freedesktop.org/wiki/Software/xdg-user-dirs/
${pkgs.xdg-user-dirs}/bin/xdg-user-dirs-update

${pkgs.mate.mate-session-manager}/bin/mate-session ${optionalString cfg.debug "--debug"} &
waitPID=$!
'';

@ -90,14 +84,20 @@ in
pkgs.mate.basePackages ++
(pkgs.gnome3.removePackagesByName
pkgs.mate.extraPackages
config.environment.mate.excludePackages);

services.dbus.packages = [
pkgs.gnome3.dconf
pkgs.at-spi2-core
];
config.environment.mate.excludePackages) ++
[
pkgs.desktop-file-utils
pkgs.glib
pkgs.gtk3.out
pkgs.shared-mime-info
pkgs.xdg-user-dirs # Update user dirs as described in https://freedesktop.org/wiki/Software/xdg-user-dirs/
];

programs.dconf.enable = true;
services.gnome3.at-spi2-core.enable = true;
services.gnome3.gnome-keyring.enable = true;
services.gnome3.gnome-settings-daemon.enable = true;
services.gnome3.gnome-settings-daemon.package = pkgs.mate.mate-settings-daemon;
services.gnome3.gvfs.enable = true;
services.upower.enable = config.powerManagement.enable;
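For context, these hunks land in the MATE desktop module, which is typically switched on like this (a minimal sketch assuming the usual desktopManager option path; the exclude list is purely illustrative):

    {
      services.xserver.enable = true;
      services.xserver.desktopManager.mate.enable = true;
      environment.mate.excludePackages = [ pkgs.mate.mate-terminal ];
    }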
@ -185,6 +185,7 @@ in
fonts.fonts = with pkgs; [
opensans-ttf
roboto-mono
pantheon.elementary-redacted-script # needed by screenshot-tool
];
fonts.fontconfig.defaultFonts = {
monospace = [ "Roboto Mono" ];

@ -61,7 +61,9 @@ let
'';
description = ''
Extra lines to append to the <literal>Monitor</literal> section
verbatim.
verbatim. Available options are documented in the MONITOR section in
<citerefentry><refentrytitle>xorg.conf</refentrytitle>
<manvolnum>5</manvolnum></citerefentry>.
'';
};
};

@ -633,7 +635,7 @@ in

environment.pathsToLink = [ "/share/X11" ];

xdg = {
xdg = {
autostart.enable = true;
menus.enable = true;
mime.enable = true;
@ -130,11 +130,9 @@ let

failedAssertions = map (x: x.message) (filter (x: !x.assertion) config.assertions);

showWarnings = res: fold (w: x: builtins.trace "[1;31mwarning: ${w}[0m" x) res config.warnings;

baseSystemAssertWarn = if failedAssertions != []
then throw "\nFailed assertions:\n${concatStringsSep "\n" (map (x: "- ${x}") failedAssertions)}"
else showWarnings baseSystem;
else showWarnings config.warnings baseSystem;

# Replace runtime dependencies
system = fold ({ oldDependency, newDependency }: drv:

@ -57,7 +57,7 @@ let cfg = config.system.autoUpgrade; in

};

config = {
config = lib.mkIf cfg.enable {

system.autoUpgrade.flags =
[ "--no-build-output" ]

@ -84,7 +84,7 @@ let cfg = config.system.autoUpgrade; in
${config.system.build.nixos-rebuild}/bin/nixos-rebuild switch ${toString cfg.flags}
'';

startAt = optional cfg.enable cfg.dates;
startAt = cfg.dates;
};

};
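With the module body now wrapped in lib.mkIf cfg.enable, the upgrade unit and its timer only exist when the service is actually enabled. A minimal host snippet exercising it (values illustrative):

    {
      system.autoUpgrade.enable = true;
      system.autoUpgrade.dates = "04:40";
    }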
@ -103,16 +103,18 @@ let

script =
''
# Set the static DNS configuration, if given.
${pkgs.openresolv}/sbin/resolvconf -m 1 -a static <<EOF
${optionalString (cfg.nameservers != [] && cfg.domain != null) ''
domain ${cfg.domain}
${optionalString (!config.environment.etc?"resolv.conf") ''
# Set the static DNS configuration, if given.
${pkgs.openresolv}/sbin/resolvconf -m 1 -a static <<EOF
${optionalString (cfg.nameservers != [] && cfg.domain != null) ''
domain ${cfg.domain}
''}
${optionalString (cfg.search != []) ("search " + concatStringsSep " " cfg.search)}
${flip concatMapStrings cfg.nameservers (ns: ''
nameserver ${ns}
'')}
EOF
''}
${optionalString (cfg.search != []) ("search " + concatStringsSep " " cfg.search)}
${flip concatMapStrings cfg.nameservers (ns: ''
nameserver ${ns}
'')}
EOF

# Set the default gateway.
${optionalString (cfg.defaultGateway != null && cfg.defaultGateway.address != "") ''
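The added guard relies on Nix's attribute-existence operator, so the static resolv.conf block is skipped whenever some other module already defines environment.etc."resolv.conf". A tiny standalone illustration of the operator (not taken from the module):

    let etc = { "resolv.conf" = { text = "nameserver 127.0.0.1"; }; };
    in ! (etc ? "resolv.conf")   # false here, so the static block would be skipped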
@ -83,6 +83,8 @@ in
};

config = mkIf cfg.enable (mkMerge [{
warnings = mkIf (config.nixpkgs.config.virtualbox.enableExtensionPack or false)
["'nixpkgs.virtualbox.enableExtensionPack' has no effect, please use 'virtualisation.virtualbox.host.enableExtensionPack'"];
boot.kernelModules = [ "vboxdrv" "vboxnetadp" "vboxnetflt" ];
boot.extraModulePackages = [ kernelModules ];
environment.systemPackages = [ virtualbox ];
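The new warning steers users from the nixpkgs config flag to the module option named in its message; the intended replacement setting looks like this (illustrative):

    {
      virtualisation.virtualbox.host.enable = true;
      virtualisation.virtualbox.host.enableExtensionPack = true;
    }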
@ -34,8 +34,8 @@ import ./make-test.nix ({ pkgs, ... }: {

# To test the pullImage tool
$docker->succeed("docker load --input='${pkgs.dockerTools.examples.nixFromDockerHub}'");
$docker->succeed("docker run --rm nixos/nix:1.11 nix-store --version");
$docker->succeed("docker rmi nixos/nix:1.11");
$docker->succeed("docker run --rm nixos/nix:2.2.1 nix-store --version");
$docker->succeed("docker rmi nixos/nix:2.2.1");

# To test runAsRoot and entry point
$docker->succeed("docker load --input='${pkgs.dockerTools.examples.nginx}'");
@ -273,6 +273,37 @@ let
};
};

makeLuksRootTest = name: luksFormatOpts: makeInstallerTest "luksroot-format2"
{ createPartitions = ''
$machine->succeed(
"flock /dev/vda parted --script /dev/vda -- mklabel msdos"
. " mkpart primary ext2 1M 50MB" # /boot
. " mkpart primary linux-swap 50M 1024M"
. " mkpart primary 1024M -1s", # LUKS
"udevadm settle",
"mkswap /dev/vda2 -L swap",
"swapon -L swap",
"modprobe dm_mod dm_crypt",
"echo -n supersecret | cryptsetup luksFormat ${luksFormatOpts} -q /dev/vda3 -",
"echo -n supersecret | cryptsetup luksOpen --key-file - /dev/vda3 cryptroot",
"mkfs.ext3 -L nixos /dev/mapper/cryptroot",
"mount LABEL=nixos /mnt",
"mkfs.ext3 -L boot /dev/vda1",
"mkdir -p /mnt/boot",
"mount LABEL=boot /mnt/boot",
);
'';
extraConfig = ''
boot.kernelParams = lib.mkAfter [ "console=tty0" ];
'';
enableOCR = true;
preBootCommands = ''
$machine->start;
$machine->waitForText(qr/Passphrase for/);
$machine->sendChars("supersecret\n");
'';
};

in {

@ -446,37 +477,14 @@ in {
'';
};

# Boot off an encrypted root partition
luksroot = makeInstallerTest "luksroot"
{ createPartitions = ''
$machine->succeed(
"flock /dev/vda parted --script /dev/vda -- mklabel msdos"
. " mkpart primary ext2 1M 50MB" # /boot
. " mkpart primary linux-swap 50M 1024M"
. " mkpart primary 1024M -1s", # LUKS
"udevadm settle",
"mkswap /dev/vda2 -L swap",
"swapon -L swap",
"modprobe dm_mod dm_crypt",
"echo -n supersecret | cryptsetup luksFormat -q /dev/vda3 -",
"echo -n supersecret | cryptsetup luksOpen --key-file - /dev/vda3 cryptroot",
"mkfs.ext3 -L nixos /dev/mapper/cryptroot",
"mount LABEL=nixos /mnt",
"mkfs.ext3 -L boot /dev/vda1",
"mkdir -p /mnt/boot",
"mount LABEL=boot /mnt/boot",
);
'';
extraConfig = ''
boot.kernelParams = lib.mkAfter [ "console=tty0" ];
'';
enableOCR = true;
preBootCommands = ''
$machine->start;
$machine->waitForText(qr/Passphrase for/);
$machine->sendChars("supersecret\n");
'';
};
# Boot off an encrypted root partition with the default LUKS header format
luksroot = makeLuksRootTest "luksroot-format1" "";

# Boot off an encrypted root partition with LUKS1 format
luksroot-format1 = makeLuksRootTest "luksroot-format1" "--type=LUKS1";

# Boot off an encrypted root partition with LUKS2 format
luksroot-format2 = makeLuksRootTest "luksroot-format2" "--type=LUKS2";

# Test whether opening encrypted filesystem with keyfile
# Checks for regression of missing cryptsetup, when no luks device without
@ -1,37 +1,37 @@
{ stdenv, python3, pkgconfig, which, libtool, autoconf, automake,
autogen, sqlite, gmp, zlib, fetchzip }:
autogen, sqlite, gmp, zlib, fetchurl, unzip, fetchpatch }:

with stdenv.lib;
stdenv.mkDerivation rec {
name = "clightning-${version}";
version = "0.6.3";
version = "0.7.0";

src = fetchzip {
#
# NOTE 0.6.3 release zip was bugged, this zip is a fix provided by the team
# https://github.com/ElementsProject/lightning/issues/2254#issuecomment-453791475
#
# replace url with:
# https://github.com/ElementsProject/lightning/releases/download/v${version}/clightning-v${version}.zip
# for future relases
#
url = "https://github.com/ElementsProject/lightning/files/2752675/clightning-v0.6.3.zip";
sha256 = "0k5pwimwn69pcakiq4a7qnjyf4i8w1jlacwrjazm1sfivr6nfiv6";
src = fetchurl {
url = "https://github.com/ElementsProject/lightning/releases/download/v${version}/clightning-v${version}.zip";
sha256 = "448022c2433cbf19bbd0f726344b0500c0c21ee5cc2291edf6b622f094cb3a15";
};

enableParallelBuilding = true;

nativeBuildInputs = [ autoconf autogen automake libtool pkgconfig which ];
nativeBuildInputs = [ autoconf autogen automake libtool pkgconfig which unzip ];
buildInputs = [ sqlite gmp zlib python3 ];

makeFlags = [ "prefix=$(out) VERSION=v${version}" ];

patches = [
# remove after 0.7.0
(fetchpatch {
name = "fix-0.7.0-build.patch";
url = "https://github.com/ElementsProject/lightning/commit/ffc03d2bc84dc42f745959fbb6c8007cf0a6f701.patch";
sha256 = "1m5fiz3m8k3nk09nldii8ij94bg6fqllqgdbiwj3sy12vihs8c4v";
})
];

configurePhase = ''
./configure --prefix=$out --disable-developer --disable-valgrind
'';

postPatch = ''
echo "" > tools/refresh-submodules.sh
patchShebangs tools/generate-wire.py
'';
@ -17,11 +17,11 @@ with stdenv.lib;

stdenv.mkDerivation rec {
name = "particl-core-${version}";
version = "0.17.0.2";
version = "0.17.1.2";

src = fetchurl {
url = "https://github.com/particl/particl-core/archive/v${version}.tar.gz";
sha256 = "0bkxdayl0jrfhgz8qzqqpwzv0yavz3nwsn6c8k003jnbcw65fkhx";
sha256 = "16hcyxwp6yrypwvxz6i2987z3jmpk47xcgnsgh9klih8baqg64p5";
};

nativeBuildInputs = [ pkgconfig autoreconfHook ];

@ -11,12 +11,12 @@ with stdenv.lib;
stdenv.mkDerivation rec {
name = "wownero-${version}";

version = "0.5.0.0";
version = "0.5.0.2";
src = fetchFromGitHub {
owner = "wownero";
repo = "wownero";
rev = "v${version}";
sha256 = "1dy9ycabva2z0896al1k2avl9xppkxvm1p2jwmg509ahjl98k3sy";
sha256 = "120cfkl2q8qgl3ajxfkkri9bxlnvmr1mhb1wvcigch1lqyflff1w";
};

nativeBuildInputs = [ cmake pkgconfig git ];

@ -2,11 +2,11 @@

stdenv.mkDerivation rec {
name = "bs1770gain-${version}";
version = "0.5.1";
version = "0.5.2";

src = fetchurl {
url = "mirror://sourceforge/bs1770gain/${name}.tar.gz";
sha256 = "0r4fbajgfmnwgl63hcm56f1j8m5f135q6j5jkzdvrrhpcj39yx06";
sha256 = "1p6yz5q7czyf9ard65sp4kawdlkg40cfscr3b24znymmhs3p7rbk";
};

buildInputs = [ ffmpeg sox ];
@ -8,6 +8,7 @@
, samplerateSupport ? jackSupport, libsamplerate ? null
, ossSupport ? false, alsaOss ? null
, pulseaudioSupport ? config.pulseaudio or false, libpulseaudio ? null
, mprisSupport ? stdenv.isLinux, systemd ? null

# TODO: add these
#, artsSupport

@ -60,6 +61,7 @@ let
(mkFlag samplerateSupport "CONFIG_SAMPLERATE=y" libsamplerate)
(mkFlag ossSupport "CONFIG_OSS=y" alsaOss)
(mkFlag pulseaudioSupport "CONFIG_PULSE=y" libpulseaudio)
(mkFlag mprisSupport "CONFIG_MPRIS=y" systemd)

#(mkFlag artsSupport "CONFIG_ARTS=y")
#(mkFlag roarSupport "CONFIG_ROAR=y")

@ -47,13 +47,13 @@ let
];
in stdenv.mkDerivation rec {
pname = "pulseeffects";
version = "4.4.7";
version = "4.5.5";

src = fetchFromGitHub {
owner = "wwmm";
repo = "pulseeffects";
rev = "v${version}";
sha256 = "14sxwy3mayzn9k5hy58mjzhxaj4wqxvs257xaj03mwvm48k7c7ia";
sha256 = "0ll85c9xll2i42r1bdgbnxi5mc5jq2vxgp179jj2iq7wczq5psj1";
};

nativeBuildInputs = [

@ -74,8 +74,8 @@ in stdenv.mkDerivation rec {
gtk3
gtkmm3
gst_all_1.gstreamer
gst_all_1.gst-plugins-base
gst_all_1.gst-plugins-good
gst_all_1.gst-plugins-base # gst-fft
gst_all_1.gst-plugins-good # pulsesrc
gst_all_1.gst-plugins-bad
lilv lv2 serd sord sratom
libbs2b
@ -1,4 +1,4 @@
{ stdenv, fetchFromGitHub, pkgconfig, intltool, wrapGAppsHook
{ stdenv, fetchFromGitHub, pkgconfig, gettext, intltool, wrapGAppsHook
, python3Packages, gnome3, gtk3, gobject-introspection}:

let

@ -16,7 +16,7 @@ in buildPythonApplication rec {

disabled = !isPy3k;

nativeBuildInputs = [ pkgconfig ];
nativeBuildInputs = [ pkgconfig gettext ];
buildInputs = [
intltool wrapGAppsHook
gnome3.adwaita-icon-theme

@ -21,6 +21,10 @@ stdenv.mkDerivation rec {
substituteInPlace bin/areca_run.sh --replace "/usr/java" "${jre}/lib/openjdk"
substituteInPlace bin/areca_run.sh --replace "/usr/lib/java/swt.jar" "${swt}/jars/swt.jar"

# Fix for NixOS/nixpkgs/issues/53716
sed -i -e 's;^;#include <attr/attributes.h>;' jni/com_myJava_file_metadata_posix_jni_wrapper_FileAccessWrapper.c
substituteInPlace jni/com_myJava_file_metadata_posix_jni_wrapper_FileAccessWrapper.c --replace attr/xattr.h sys/xattr.h

sed -i "s#^PROGRAM_DIR.*#PROGRAM_DIR=$out#g" bin/areca_run.sh
sed -i "s#^LIBRARY_PATH.*#LIBRARY_PATH=$out/lib:${stdenv.lib.makeLibraryPath [ swt acl ]}#g" bin/areca_run.sh
@ -8,14 +8,14 @@ let
inherit (gnome2) GConf gnome_vfs;
};
stableVersion = {
version = "3.3.1.0"; # "Android Studio 3.3.1"
build = "182.5264788";
sha256Hash = "0fghqkc8pkb7waxclm0qq4nlnsvmv9d3fcj5nnvgbfkjyw032q42";
version = "3.3.2.0"; # "Android Studio 3.3.2"
build = "182.5314842";
sha256Hash = "0smh3d3v8n0isxg7fkls20622gp52f58i2b6wa4a0g8wnvmd6mw2";
};
betaVersion = {
version = "3.4.0.14"; # "Android Studio 3.4 Beta 5"
build = "183.5310756";
sha256Hash = "0np8600qvqpw9kcmgp04i1nak1339ck1iidkzr75kigp5rgdl2bq";
version = "3.4.0.15"; # "Android Studio 3.4 RC 1"
build = "183.5341121";
sha256Hash = "0s7wadnzbrd031ls43b5nbh1nx0paj74bxy2yiczr4qb9n562zzy";
};
latestVersion = { # canary & dev
version = "3.5.0.5"; # "Android Studio 3.5 Canary 6"
@ -10,6 +10,7 @@
, withXwidgets ? false, webkitgtk ? null, wrapGAppsHook ? null, glib-networking ? null
, withCsrc ? true
, srcRepo ? false, autoconf ? null, automake ? null, texinfo ? null
, siteStart ? ./site-start.el
}:

assert (libXft != null) -> libpng != null; # probably a bug

@ -100,7 +101,7 @@ stdenv.mkDerivation rec {

postInstall = ''
mkdir -p $out/share/emacs/site-lisp
cp ${./site-start.el} $out/share/emacs/site-lisp/site-start.el
cp ${siteStart} $out/share/emacs/site-lisp/site-start.el
$out/bin/emacs --batch -f batch-byte-compile $out/share/emacs/site-lisp/site-start.el

rm -rf $out/var
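The new siteStart argument makes the bundled site-start.el replaceable without patching the derivation; a hypothetical override (the file path is an assumption):

    emacs.override { siteStart = ./my-site-start.el; }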
@ -3,13 +3,13 @@
with qt5;

stdenv.mkDerivation rec {
version = "0.9.2";
version = "0.9.4";
name = "featherpad-${version}";
src = fetchFromGitHub {
owner = "tsujan";
repo = "FeatherPad";
rev = "V${version}";
sha256 = "1kpv8x3m4hiz7q9k7qadgbrys5nyzm7v5mhjyk22hawnp98m9x4q";
sha256 = "18zna6rx2qyiplr44wrkvr4avk9yy2l1s23fy3d7ql9f1fq12z3w";
};
nativeBuildInputs = [ qmake pkgconfig qttools ];
buildInputs = [ qtbase qtsvg qtx11extras ];

@ -4,13 +4,13 @@

stdenv.mkDerivation rec {
pname = "quilter";
version = "1.7.0";
version = "1.7.5";

src = fetchFromGitHub {
owner = "lainsce";
repo = pname;
rev = version;
sha256 = "0dqji6zwpn0k89mpmh10rq59hzrq8kqr30dz1hp06ygk8rlnv2ys";
sha256 = "0czf6rm908pz6zwiaq2phci923q8xa8x7q7kvdk6s3km4i1rrgkn";
};

nativeBuildInputs = [
@ -1,8 +1,9 @@
{buildVersion, x32sha256, x64sha256}:
{buildVersion, x32sha256, x64sha256, dev ? false}:

{ fetchurl, stdenv, glib, xorg, cairo, gtk2, gtk3, pango, makeWrapper, wrapGAppsHook, openssl, bzip2, runtimeShell,
pkexecPath ? "/run/wrappers/bin/pkexec", libredirect,
gksuSupport ? false, gksu, unzip, zip, bash}:
gksuSupport ? false, gksu, unzip, zip, bash,
writeScript, common-updater-scripts, curl, gnugrep}:

assert gksuSupport -> gksu != null;

@ -26,7 +27,7 @@ in let

# package with just the binaries
sublime = stdenv.mkDerivation {
name = "sublimetext3-${buildVersion}-bin";
name = "sublimetext3-bin-${buildVersion}";
src =
fetchurl {
name = "sublimetext-${buildVersion}.tar.bz2";

@ -127,6 +128,22 @@ in stdenv.mkDerivation (rec {
ln -s $sublime/Icon/256x256/ $out/share/icons
'';

passthru.updateScript = writeScript "sublime3-update-script" ''
#!${stdenv.shell}
set -o errexit
PATH=${stdenv.lib.makeBinPath [ common-updater-scripts curl gnugrep ]}

latestVersion=$(curl https://www.sublimetext.com/3${stdenv.lib.optionalString dev "dev"} | grep -Po '(?<=<p class="latest"><i>Version:</i> Build )([0-9]+)')

for platform in ${stdenv.lib.concatStringsSep " " meta.platforms}; do
package=sublime3${stdenv.lib.optionalString dev "-dev"}
# The script will not perform an update when the version attribute is up to date from previous platform run
# We need to clear it before each run
update-source-version ''${package}.sublime 0 0000000000000000000000000000000000000000000000000000000000000000 --file=pkgs/applications/editors/sublime/3/packages.nix --version-key=buildVersion --system=$platform
update-source-version ''${package}.sublime $latestVersion --file=pkgs/applications/editors/sublime/3/packages.nix --version-key=buildVersion --system=$platform
done
'';

meta = with stdenv.lib; {
description = "Sophisticated text editor for code, markup and prose";
homepage = https://www.sublimetext.com/;

@ -6,6 +6,7 @@ in
rec {
sublime3-dev = common {
buildVersion = "3184";
dev = true;
x32sha256 = "1b6f1fid75g5z247dbnyyj276lrlv99scrdk1vvfcr6vyws77vzr";
x64sha256 = "03127jhfjr17ai96p3axh5b5940fds8jcw6vkid8y6dmvd2dpylz";
} {};
@ -18,16 +18,16 @@ let
}.${system};

sha256 = {
"i686-linux" = "04kbx1cx40lsy9irxy1arp1rixzk49ldhg34w3llmfbx63a4hchf";
"x86_64-linux" = "1plvx0mjcbizl6iffib95p5224r9frf0mn6c5xp14p3qnrp32jhm";
"x86_64-darwin" = "14h9gs6jpxydgd1h16ybq3ifw5jc7k83yg22pw3sk6vhy7hx7pxr";
"i686-linux" = "0iqsbny25946fyvrm8qwgbd1xmwb8psg2n2c4wdk8x52259pxfvq";
"x86_64-linux" = "0v1gbaqlaismrykl8igks5dl9bh5xh56v5aw8mffg8wxdr0alrvv";
"x86_64-darwin" = "0awq1jgqbpirrhs09x7hn4m96idb4lazm053nf5jf5yrx8pq1l9i";
}.${system};

archive_fmt = if system == "x86_64-darwin" then "zip" else "tar.gz";
in
stdenv.mkDerivation rec {
name = "vscode-${version}";
version = "1.31.1";
version = "1.32.1";

src = fetchurl {
name = "VSCode_${version}_${plat}.${archive_fmt}";
@ -84,6 +84,7 @@ stdenv.mkDerivation rec {
description = "A software suite to create, edit, compose, or convert bitmap images";
platforms = platforms.linux ++ platforms.darwin;
license = licenses.asl20;
broken = ghostscript != null; # https://github.com/NixOS/nixpkgs/issues/55118
maintainers = with maintainers; [ the-kenny ];
};
}

@ -100,6 +100,7 @@ stdenv.mkDerivation rec {
description = "A software suite to create, edit, compose, or convert bitmap images";
platforms = platforms.linux ++ platforms.darwin;
maintainers = with maintainers; [ the-kenny ];
broken = ghostscript != null; # https://github.com/NixOS/nixpkgs/issues/55118
license = licenses.asl20;
};
}
@ -23,6 +23,7 @@
, boost
, eigen
, exiv2
, ffmpeg
, flex
, jasper
, lcms2

@ -66,6 +67,7 @@ mkDerivation rec {
boost
eigen
exiv2
ffmpeg
flex
jasper
lcms2

@ -7,7 +7,7 @@ stdenv.mkDerivation rec {

src = fetchurl {
url = "https://dl.bintray.com/otfried/generic/ipe/7.2/${name}-src.tar.gz";
sha256 = "0rm31kvyg30452bz12yi49bkhdmi4bjdx6zann5cdlbi0pvmx7xh";
sha256 = "0gw45d0albrsa0pbc5g4w3fmmjfxrdbpzyc7723ncbhncdyda01h";
};

sourceRoot = "${name}/src";
pkgs/applications/graphics/xournalpp/default.nix (new file, 74 lines)
@ -0,0 +1,74 @@
{ stdenv
, lib
, fetchFromGitHub
, fetchpatch

, cmake
, gettext
, wrapGAppsHook
, pkgconfig

, glib
, gsettings-desktop-schemas
, gtk3
, hicolor-icon-theme
, libsndfile
, libxml2
, pcre
, poppler
, portaudio
, zlib

# Plugins don't appear to be working in this version, so disable them by not
# building with Lua support by default. In a future version, try switching this
# to 'true' and seeing if the top-level Plugin menu appears.
, withLua ? false, lua
}:

stdenv.mkDerivation rec {
name = "xournalpp-${version}";
version = "1.0.8";

src = fetchFromGitHub {
owner = "xournalpp";
repo = "xournalpp";
rev = version;
sha256 = "01q84xjp9z1krna10gjj562km6i3wdq8cg7paxax1k6bh52ryvf6";
};

patches = [
# This patch removes the unused 'xopp-recording.sh' file which breaks the
# cmake build; this patch isn't in a release yet, and should be removed at
# or after 1.0.9 is released.
(fetchpatch {
name = "remove-xopp-recording.sh.patch";
url = "https://github.com/xournalpp/xournalpp/commit/a17a3f2c80c607a22d0fdeb66d38358bea7e4d85.patch";
sha256 = "10pcpvklm6kr0lv2xrsbpg2037ni9j6dmxgjf56p466l3gz60iwy";
})
];

nativeBuildInputs = [ cmake gettext pkgconfig wrapGAppsHook ];
buildInputs =
[ glib
gsettings-desktop-schemas
gtk3
hicolor-icon-theme
libsndfile
libxml2
pcre
poppler
portaudio
zlib
]
++ lib.optional withLua lua;

enableParallelBuilding = true;

meta = with stdenv.lib; {
description = "Xournal++ is a handwriting Notetaking software with PDF annotation support";
homepage = https://github.com/xournalpp/xournalpp;
license = licenses.gpl2;
maintainers = with maintainers; [ andrew-d ];
platforms = platforms.linux;
};
}
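If plugins start working in a later release, Lua support can be tried without editing this file, e.g. via a hypothetical override:

    xournalpp.override { withLua = true; }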
pkgs/applications/misc/appeditor/default.nix (new file, 56 lines)
@ -0,0 +1,56 @@
{ stdenv
, fetchFromGitHub
, meson
, ninja
, pkgconfig
, pantheon
, python3
, gettext
, glib
, gtk3
, hicolor-icon-theme
, libgee
, wrapGAppsHook }:

stdenv.mkDerivation rec {
pname = "appeditor";
version = "1.1.0";

src = fetchFromGitHub {
owner = "donadigo";
repo = "appeditor";
rev = version;
sha256 = "04x2f4x4dp5ca2y3qllqjgirbyl6383pfl4bi9bkcqlg8b5081rg";
};

nativeBuildInputs = [
gettext
meson
ninja
pantheon.vala
pkgconfig
python3
wrapGAppsHook
];

buildInputs = [
glib
gtk3
hicolor-icon-theme
pantheon.granite
libgee
];

postPatch = ''
chmod +x meson/post_install.py
patchShebangs meson/post_install.py
'';

meta = with stdenv.lib; {
description = "Edit the Pantheon desktop application menu";
homepage = https://github.com/donadigo/appeditor;
maintainers = with maintainers; [ kjuvi ] ++ pantheon.maintainers;
platforms = platforms.linux;
license = licenses.gpl3;
};
}
@ -5,12 +5,12 @@
}:

stdenv.mkDerivation rec {
version = "3.39.1";
version = "3.40.1";
name = "calibre-${version}";

src = fetchurl {
url = "https://download.calibre-ebook.com/${version}/${name}.tar.xz";
sha256 = "08c1wsdn0giv9zfb6bis9bbrw687rci8fs26qsal8ijmjk55dfsh";
sha256 = "1s1kq8axfymr7agg7dqw47kanlrkzzhsy8pcj1fs5644zjp5n0bq";
};

patches = [
@ -1,30 +1,37 @@
{ stdenv, python3Packages, fetchFromGitHub }:
{ stdenv, python3, fetchFromGitHub }:

with python3Packages;
with python3.pkgs;
buildPythonApplication rec {
name = "${pname}-${version}";
pname = "cheat";
version = "2.3.1";
version = "2.5.1";

propagatedBuildInputs = [ docopt pygments ];
propagatedBuildInputs = [ docopt pygments termcolor ];

src = fetchFromGitHub {
owner = "chrisallenlane";
repo = "cheat";
rev = version;
sha256 = "1dcpjvbv648r8325qjf30m8b4cyrrjbzc2kvh40zy2mbjsa755zr";
sha256 = "1i543hvg1yizamfd83bawflfcb500hvc72i59ikck8j1hjk50hsl";
};
# no tests available
doCheck = false;

postInstall = ''
install -D man1/cheat.1.gz $out/share/man/man1/cheat.1.gz
mv $out/${python3.sitePackages}/etc $out/
mv $out/${python3.sitePackages}/usr/share/* $out/share/
rm -r $out/${python3.sitePackages}/usr
'';

makeWrapperArgs = [
"--suffix" "CHEAT_PATH" ":" "$out/share/cheat"
];

meta = with stdenv.lib; {
description = "cheat allows you to create and view interactive cheatsheets on the command-line";
maintainers = with maintainers; [ mic92 ];
license = with licenses; [gpl3 mit];
license = with licenses; [ gpl3 mit ];
homepage = https://github.com/chrisallenlane/cheat;
};
}
@ -4,11 +4,11 @@ with stdenv.lib;
stdenv.mkDerivation rec {

name = "cherrytree-${version}";
version = "0.38.7";
version = "0.38.8";

src = fetchurl {
url = "https://www.giuspen.com/software/${name}.tar.xz";
sha256 = "1ls7vz993hj5gd99imlrzahxznfg6fa4n77ikkj79va4csw9b892";
sha256 = "1ns87xl2sgrf3nha4xkhp0xcxlycqszlp6xdrn95lg6vzm0fa8dg";
};

buildInputs = with pythonPackages;
@ -1,4 +1,4 @@
{ stdenv, fetchFromGitHub, fetchpatch, cmake, libarcus, stb }:
{ stdenv, fetchFromGitHub, fetchpatch, cmake, libarcus, stb, protobuf }:

stdenv.mkDerivation rec {
name = "curaengine-${version}";

@ -12,7 +12,7 @@ stdenv.mkDerivation rec {
};

nativeBuildInputs = [ cmake ];
buildInputs = [ libarcus stb ];
buildInputs = [ libarcus stb protobuf ];

cmakeFlags = [ "-DCURA_ENGINE_VERSION=${version}" ];
Some files were not shown because too many files have changed in this diff.