Merge remote-tracking branch 'upstream/master' into openssl-1.1

Robin Gloster 2019-08-02 02:58:52 +02:00
commit 30969073f0
GPG key ID: D5C458DF6DD97EDF
316 changed files with 47697 additions and 39381 deletions

View file

@ -18,4 +18,6 @@
- [ ] Ensured that relevant documentation is up to date
- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).
---
###### Notify maintainers
cc @

View file

@ -786,7 +786,7 @@ passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ]
set, the default value is used, which is <literal>$prePhases
unpackPhase patchPhase $preConfigurePhases configurePhase
$preBuildPhases buildPhase checkPhase $preInstallPhases installPhase
fixupPhase $preDistPhases distPhase $postPhases</literal>.
fixupPhase installCheckPhase $preDistPhases distPhase $postPhases</literal>.
</para>
<para>
Usually, if you just want to add a few phases, it's more convenient
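For illustration, a minimal sketch of extending the phase list through one of the hook variables above rather than redefining it; the derivation and phase name below are made up:

# Hypothetical derivation: appends a custom phase via postPhases,
# which runs last according to the default phase order quoted above.
stdenv.mkDerivation {
  pname = "example";
  version = "1.0";
  src = ./.;

  postPhases = [ "reportPhase" ];
  reportPhase = ''
    echo "finished building example 1.0"
  '';
}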

View file

@ -111,7 +111,7 @@ rec {
name = "int";
description = "signed integer";
check = isInt;
merge = mergeOneOption;
merge = mergeEqualOption;
};
# Specialized subdomains of int
@ -176,14 +176,14 @@ rec {
name = "float";
description = "floating point number";
check = isFloat;
merge = mergeOneOption;
merge = mergeEqualOption;
};
str = mkOptionType {
name = "str";
description = "string";
check = isString;
merge = mergeOneOption;
merge = mergeEqualOption;
};
strMatching = pattern: mkOptionType {
@ -243,7 +243,7 @@ rec {
name = "path";
# Hacky: there is no isPath primop.
check = x: builtins.substring 0 1 (toString x) == "/";
merge = mergeOneOption;
merge = mergeEqualOption;
};
# drop this in the future:
@ -415,7 +415,7 @@ rec {
name = "enum";
description = "one of ${concatMapStringsSep ", " show values}";
check = flip elem values;
merge = mergeOneOption;
merge = mergeEqualOption;
functor = (defaultFunctor name) // { payload = values; binOp = a: b: unique (a ++ b); };
};
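For illustration, a hedged sketch of what the switch from mergeOneOption to mergeEqualOption means for users; the option name below is hypothetical:

# With mergeEqualOption, several modules may define the same str/int/enum
# option as long as all definitions are equal; unequal values are an error.
{
  imports = [
    { services.example.logLevel = "info"; }    # module A (hypothetical option)
    { services.example.logLevel = "info"; }    # module B: equal value, merges fine
    # { services.example.logLevel = "debug"; } # a differing value would now fail
  ];
}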

View file

@ -393,6 +393,11 @@
github = "aneeshusa";
name = "Aneesh Agrawal";
};
angristan = {
email = "angristan@pm.me";
github = "angristan";
name = "Stanislas Lange";
};
ankhers = {
email = "justin.k.wood@gmail.com";
github = "ankhers";
@ -2156,6 +2161,11 @@
github = "hlolli";
name = "Hlodver Sigurdsson";
};
hugoreeves = {
email = "hugolreeves@gmail.com";
github = "hugoreeves";
name = "Hugo Reeves";
};
hodapp = {
email = "hodapp87@gmail.com";
github = "Hodapp87";

View file

@ -163,7 +163,8 @@
Most of the httpd subservices packaged with NixOS have been replaced with
full NixOS modules including LimeSurvey, WordPress, and Zabbix. These
modules can be enabled using the <option>services.limesurvey.enable</option>,
<option>services.wordpress.enable</option>, and <option>services.zabbixWeb.enable</option> options.
<option>services.mediawiki.enable</option>, <option>services.wordpress.enable</option>,
and <option>services.zabbixWeb.enable</option> options.
</para>
</listitem>
<listitem>
@ -220,6 +221,9 @@
<link xlink:href="https://wiki.shibboleth.net/confluence/display/SP3/ReleaseNotes">release notes</link>
and <link xlink:href="https://wiki.shibboleth.net/confluence/display/SP3/UpgradingFromV2">upgrade guide</link>.
</para>
<para>
Nodejs 8 is scheduled to reach end-of-life during the lifetime of 19.09 and has therefore been dropped.
</para>
</listitem>
</itemizedlist>
</section>

View file

@ -1,10 +1,12 @@
{ config, pkgs, lib, ... }:
with lib;
{
options.xdg.portal = {
enable =
mkEnableOption "<link xlink:href='https://github.com/flatpak/xdg-desktop-portal'>xdg desktop integration</link>"//{
default = config.services.xserver.enable;
default = false;
};
extraPortals = mkOption {
@ -19,6 +21,17 @@ with lib;
environments you probably want to add them yourself.
'';
};
gtkUsePortal = mkOption {
type = types.bool;
default = false;
description = ''
Sets environment variable <literal>GTK_USE_PORTAL</literal> to <literal>1</literal>.
This is needed for packages run outside Flatpak to respect and use XDG Desktop Portals.
For example, you'd need to set this for non-Flatpak Firefox to use native file choosers.
Defaults to <literal>false</literal> to respect its opt-in nature.
'';
};
};
config =
@ -28,10 +41,17 @@ with lib;
in mkIf cfg.enable {
assertions = [
{ assertion = (cfg.gtkUsePortal -> cfg.extraPortals != []);
message = "Setting xdg.portal.gtkUsePortal to true requires a portal implementation in xdg.portal.extraPortals such as xdg-desktop-portal-gtk or xdg-desktop-portal-kde.";
}
];
services.dbus.packages = packages;
systemd.packages = packages;
environment.variables = {
GTK_USE_PORTAL = "1";
GTK_USE_PORTAL = mkIf cfg.gtkUsePortal "1";
XDG_DESKTOP_PORTAL_PATH = map (p: "${p}/share/xdg-desktop-portal/portals") cfg.extraPortals;
};
};
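A minimal sketch of the options introduced above; note that xdg.portal.enable must now be set explicitly since it no longer follows services.xserver.enable:

{ pkgs, ... }: {
  xdg.portal = {
    enable = true;
    extraPortals = [ pkgs.xdg-desktop-portal-gtk ];  # required when gtkUsePortal is set
    gtkUsePortal = true;                             # exports GTK_USE_PORTAL=1
  };
}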

View file

@ -519,6 +519,7 @@
./services/monitoring/systemhealth.nix
./services/monitoring/teamviewer.nix
./services/monitoring/telegraf.nix
./services/monitoring/thanos.nix
./services/monitoring/ups.nix
./services/monitoring/uptime.nix
./services/monitoring/vnstat.nix
@ -780,6 +781,7 @@
./services/web-apps/icingaweb2/module-monitoring.nix
./services/web-apps/limesurvey.nix
./services/web-apps/mattermost.nix
./services/web-apps/mediawiki.nix
./services/web-apps/miniflux.nix
./services/web-apps/nextcloud.nix
./services/web-apps/nexus.nix

View file

@ -26,6 +26,8 @@ with lib;
security.allowSimultaneousMultithreading = mkDefault false;
security.forcePageTableIsolation = mkDefault true;
security.virtualisation.flushL1DataCache = mkDefault "always";
security.apparmor.enable = mkDefault true;
@ -42,9 +44,6 @@ with lib;
# Disable legacy virtual syscalls
"vsyscall=none"
# Enable PTI even if CPU claims to be safe from meltdown
"pti=on"
];
boot.blacklistedKernelModules = [

View file

@ -12,7 +12,7 @@ with lib;
''
# Set up the per-user profile.
mkdir -m 0755 -p "$NIX_USER_PROFILE_DIR"
if [ "$(stat --printf '%u' "$NIX_USER_PROFILE_DIR")" != "$(id -u)" ]; then
if [ "$(stat -c '%u' "$NIX_USER_PROFILE_DIR")" != "$(id -u)" ]; then
echo "WARNING: the per-user profile dir $NIX_USER_PROFILE_DIR should belong to user id $(id -u)" >&2
fi
@ -34,7 +34,7 @@ with lib;
# Create the per-user garbage collector roots directory.
NIX_USER_GCROOTS_DIR="/nix/var/nix/gcroots/per-user/$USER"
mkdir -m 0755 -p "$NIX_USER_GCROOTS_DIR"
if [ "$(stat --printf '%u' "$NIX_USER_GCROOTS_DIR")" != "$(id -u)" ]; then
if [ "$(stat -c '%u' "$NIX_USER_GCROOTS_DIR")" != "$(id -u)" ]; then
echo "WARNING: the per-user gcroots dir $NIX_USER_GCROOTS_DIR should belong to user id $(id -u)" >&2
fi

View file

@ -21,7 +21,7 @@ let
knownHostsText = (flip (concatMapStringsSep "\n") knownHosts
(h: assert h.hostNames != [];
concatStringsSep "," h.hostNames + " "
optionalString h.certAuthority "@cert-authority " + concatStringsSep "," h.hostNames + " "
+ (if h.publicKey != null then h.publicKey else readFile h.publicKeyFile)
)) + "\n";
@ -128,6 +128,14 @@ in
default = {};
type = types.loaOf (types.submodule ({ name, ... }: {
options = {
certAuthority = mkOption {
type = types.bool;
default = false;
description = ''
This public key is an SSH certificate authority, rather than an
individual host's key.
'';
};
hostNames = mkOption {
type = types.listOf types.str;
default = [];
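A sketch of the new certAuthority option; the host pattern and key below are placeholders:

{ ... }: {
  programs.ssh.knownHosts.example-ca = {
    certAuthority = true;                              # emits an @cert-authority known_hosts entry
    hostNames = [ "*.example.org" ];
    publicKey = "ssh-ed25519 AAAA... ca@example.org";  # placeholder key
  };
}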

View file

@ -26,6 +26,7 @@ in
package = mkOption {
type = types.package;
default = pkgs.xonsh;
example = literalExample "pkgs.xonsh.override { configFile = \"/path/to/xonshrc\"; }";
description = ''
xonsh package to use.
@ -46,11 +47,11 @@ in
environment.etc."xonshrc".text = cfg.config;
environment.systemPackages = [ pkgs.xonsh ];
environment.systemPackages = [ cfg.package ];
environment.shells =
[ "/run/current-system/sw/bin/xonsh"
"${pkgs.xonsh}/bin/xonsh"
"${cfg.package}/bin/xonsh"
];
};
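A sketch of using the new programs.xonsh.package option, reusing the override shown in the example above:

{ pkgs, ... }: {
  programs.xonsh = {
    enable = true;
    package = pkgs.xonsh.override { configFile = "/path/to/xonshrc"; };
  };
}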

View file

@ -54,6 +54,18 @@ with lib;
'';
};
security.forcePageTableIsolation = mkOption {
type = types.bool;
default = false;
description = ''
Whether to force-enable the Page Table Isolation (PTI) Linux kernel
feature even on CPU models that claim to be safe from Meltdown.
This hardening feature is most beneficial to systems that run untrusted
workloads that rely on address space isolation for security.
'';
};
security.virtualisation.flushL1DataCache = mkOption {
type = types.nullOr (types.enum [ "never" "cond" "always" ]);
default = null;
@ -114,6 +126,10 @@ with lib;
boot.kernelParams = [ "nosmt" ];
})
(mkIf config.security.forcePageTableIsolation {
boot.kernelParams = [ "pti=on" ];
})
(mkIf (config.security.virtualisation.flushL1DataCache != null) {
boot.kernelParams = [ "kvm-intel.vmentry_l1d_flush=${config.security.virtualisation.flushL1DataCache}" ];
})
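A sketch of opting into these mitigations outside the hardened profile; the flushL1DataCache value is just one of the allowed choices:

{ ... }: {
  security.forcePageTableIsolation = true;            # adds "pti=on" to the kernel command line
  security.virtualisation.flushL1DataCache = "cond";  # adds kvm-intel.vmentry_l1d_flush=cond
}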

View file

@ -22,6 +22,12 @@ in {
###### implementation
config = mkIf cfg.enable {
assertions = [
{ assertion = (config.xdg.portal.enable == true);
message = "To use Flatpak you must enable XDG Desktop Portals with xdg.portal.enable.";
}
];
environment.systemPackages = [ pkgs.flatpak ];
services.dbus.packages = [ pkgs.flatpak ];
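A sketch of a configuration satisfying the new assertion, pairing Flatpak with a portal implementation:

{ pkgs, ... }: {
  services.flatpak.enable = true;
  xdg.portal.enable = true;
  xdg.portal.extraPortals = [ pkgs.xdg-desktop-portal-gtk ];
}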

View file

@ -502,6 +502,7 @@ in {
"d ${cfg.statePath} 0750 ${cfg.user} ${cfg.group} -"
"d ${cfg.statePath}/builds 0750 ${cfg.user} ${cfg.group} -"
"d ${cfg.statePath}/config 0750 ${cfg.user} ${cfg.group} -"
"d ${cfg.statePath}/config/initializers 0750 ${cfg.user} ${cfg.group} -"
"d ${cfg.statePath}/db 0750 ${cfg.user} ${cfg.group} -"
"d ${cfg.statePath}/log 0750 ${cfg.user} ${cfg.group} -"
"d ${cfg.statePath}/repositories 2770 ${cfg.user} ${cfg.group} -"

View file

@ -467,7 +467,7 @@ in
fi
'';
nix.nrBuildUsers = mkDefault (lib.max 32 cfg.maxJobs);
nix.nrBuildUsers = mkDefault (lib.max 32 (if cfg.maxJobs == "auto" then 0 else cfg.maxJobs));
users.users = nixbldUsers;

View file

@ -411,7 +411,7 @@ in {
} else {
cert = "${cfg.pki.manual.server.cert}";
key = "${cfg.pki.manual.server.key}";
crl = "${cfg.pki.manual.server.crl}";
${mapNullable (_: "crl") cfg.pki.manual.server.crl} = "${cfg.pki.manual.server.crl}";
});
ca.cert = if needToCreateCA then "${cfg.dataDir}/keys/ca.cert"

View file

@ -28,6 +28,7 @@ let
"dovecot"
"fritzbox"
"json"
"mail"
"minio"
"nginx"
"node"
@ -162,13 +163,19 @@ in
};
config = mkMerge ([{
assertions = [{
assertions = [ {
assertion = (cfg.snmp.configurationPath == null) != (cfg.snmp.configuration == null);
message = ''
Please ensure you have either `services.prometheus.exporters.snmp.configuration'
or `services.prometheus.exporters.snmp.configurationPath' set!
'';
}];
} {
assertion = (cfg.mail.configFile == null) != (cfg.mail.configuration == {});
message = ''
Please specify either 'services.prometheus.exporters.mail.configuration'
or 'services.prometheus.exporters.mail.configFile'.
'';
} ];
}] ++ [(mkIf config.services.minio.enable {
services.prometheus.exporters.minio.minioAddress = mkDefault "http://localhost:9000";
services.prometheus.exporters.minio.minioAccessKey = mkDefault config.services.minio.accessKey;

View file

@ -0,0 +1,156 @@
{ config, lib, pkgs, options }:
with lib;
let
cfg = config.services.prometheus.exporters.mail;
configurationFile = pkgs.writeText "prometheus-mail-exporter.conf" (builtins.toJSON (
# removes the _module attribute, null values and converts attrNames to lowercase
mapAttrs' (name: value:
if name == "servers"
then nameValuePair (toLower name)
((map (srv: (mapAttrs' (n: v: nameValuePair (toLower n) v)
(filterAttrs (n: v: !(n == "_module" || v == null)) srv)
))) value)
else nameValuePair (toLower name) value
) (filterAttrs (n: _: !(n == "_module")) cfg.configuration)
));
serverOptions.options = {
name = mkOption {
type = types.str;
description = ''
Value for label 'configname' which will be added to all metrics.
'';
};
server = mkOption {
type = types.str;
description = ''
Hostname of the server that should be probed.
'';
};
port = mkOption {
type = types.int;
example = 587;
description = ''
Port to use for SMTP.
'';
};
from = mkOption {
type = types.str;
example = "exporteruser@domain.tld";
description = ''
Content of 'From' Header for probing mails.
'';
};
to = mkOption {
type = types.str;
example = "exporteruser@domain.tld";
description = ''
Content of 'To' Header for probing mails.
'';
};
detectionDir = mkOption {
type = types.path;
example = "/var/spool/mail/exporteruser/new";
description = ''
Directory in which new mails for the exporter user are placed.
Note that this needs to exist when the exporter starts.
'';
};
login = mkOption {
type = types.nullOr types.str;
default = null;
example = "exporteruser@domain.tld";
description = ''
Username to use for SMTP authentication.
'';
};
passphrase = mkOption {
type = types.nullOr types.str;
default = null;
description = ''
Password to use for SMTP authentication.
'';
};
};
exporterOptions.options = {
monitoringInterval = mkOption {
type = types.str;
example = "10s";
description = ''
Time interval between two probe attempts.
'';
};
mailCheckTimeout = mkOption {
type = types.str;
description = ''
Timeout after which a probe mail is considered lost.
'';
};
disableFileDelition = mkOption {
type = types.bool;
default = false;
description = ''
Disables the exporter's function to delete probing mails.
'';
};
servers = mkOption {
type = types.listOf (types.submodule serverOptions);
default = [];
example = literalExample ''
[ {
name = "testserver";
server = "smtp.domain.tld";
port = 587;
from = "exporteruser@domain.tld";
to = "exporteruser@domain.tld";
detectionDir = "/path/to/Maildir/new";
} ]
'';
description = ''
List of servers that should be probed.
'';
};
};
in
{
port = 9225;
extraOpts = {
configFile = mkOption {
type = types.nullOr types.path;
default = null;
description = ''
Specify the mailexporter configuration file to use.
'';
};
configuration = mkOption {
type = types.submodule exporterOptions;
default = {};
description = ''
Specify the mailexporter configuration as a structured attribute set; it is converted to the configuration file automatically.
'';
};
telemetryPath = mkOption {
type = types.str;
default = "/metrics";
description = ''
Path under which to expose metrics.
'';
};
};
serviceOpts = {
serviceConfig = {
ExecStart = ''
${pkgs.prometheus-mail-exporter}/bin/mailexporter \
--web.listen-address ${cfg.listenAddress}:${toString cfg.port} \
--config.file ${
if cfg.configuration != {} then configurationFile else cfg.configFile
} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
};
};
}
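A sketch of enabling the new mail exporter with a structured configuration; addresses, paths and intervals below are placeholders:

{ ... }: {
  services.prometheus.exporters.mail = {
    enable = true;
    configuration = {
      monitoringInterval = "10s";
      mailCheckTimeout = "30s";
      servers = [ {
        name = "testserver";
        server = "smtp.domain.tld";
        port = 587;
        from = "exporteruser@domain.tld";
        to = "exporteruser@domain.tld";
        detectionDir = "/var/spool/mail/exporteruser/new";
      } ];
    };
  };
}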

View file

@ -0,0 +1,801 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.thanos;
nullOpt = type: description: mkOption {
type = types.nullOr type;
default = null;
inherit description;
};
optionToArgs = opt: v : optional (v != null) ''--${opt}="${toString v}"'';
flagToArgs = opt: v : optional v ''--${opt}'';
listToArgs = opt: vs : map (v: ''--${opt}="${v}"'') vs;
attrsToArgs = opt: kvs: mapAttrsToList (k: v: ''--${opt}=${k}=\"${v}\"'') kvs;
mkParamDef = type: default: description: mkParam type (description + ''
Defaults to <literal>${toString default}</literal> in Thanos
when set to <literal>null</literal>.
'');
mkParam = type: description: {
toArgs = optionToArgs;
option = nullOpt type description;
};
mkFlagParam = description: {
toArgs = flagToArgs;
option = mkOption {
type = types.bool;
default = false;
inherit description;
};
};
mkListParam = opt: description: {
toArgs = _opt: listToArgs opt;
option = mkOption {
type = types.listOf types.str;
default = [];
inherit description;
};
};
mkAttrsParam = opt: description: {
toArgs = _opt: attrsToArgs opt;
option = mkOption {
type = types.attrsOf types.str;
default = {};
inherit description;
};
};
mkStateDirParam = opt: default: description: {
toArgs = _opt: stateDir: optionToArgs opt "/var/lib/${stateDir}";
option = mkOption {
type = types.str;
inherit default;
inherit description;
};
};
toYAML = name: attrs: pkgs.runCommandNoCC name {
preferLocalBuild = true;
json = builtins.toFile "${name}.json" (builtins.toJSON attrs);
nativeBuildInputs = [ pkgs.remarshal ];
} ''json2yaml -i $json -o $out'';
thanos = cmd: "${cfg.package}/bin/thanos ${cmd}" +
(let args = cfg."${cmd}".arguments;
in optionalString (length args != 0) (" \\\n " +
concatStringsSep " \\\n " args));
argumentsOf = cmd: concatLists (collect isList
(flip mapParamsRecursive params."${cmd}" (path: param:
let opt = concatStringsSep "." path;
v = getAttrFromPath path cfg."${cmd}";
in param.toArgs opt v)));
mkArgumentsOption = cmd: mkOption {
type = types.listOf types.str;
default = argumentsOf cmd;
description = ''
Arguments to the <literal>thanos ${cmd}</literal> command.
Defaults to a list of arguments formed by converting the structured
options of <option>services.thanos.${cmd}</option> to a list of arguments.
Overriding this option will cause none of the structured options to have
any effect. So only set this if you know what you're doing!
'';
};
mapParamsRecursive =
let noParam = attr: !(attr ? "toArgs" && attr ? "option");
in mapAttrsRecursiveCond noParam;
paramsToOptions = mapParamsRecursive (_path: param: param.option);
params = {
log = {
log.level = mkParamDef (types.enum ["debug" "info" "warn" "error" "fatal"]) "info" ''
Log filtering level.
'';
log.format = mkParam types.str ''
Log format to use.
'';
};
tracing = cfg: {
tracing.config-file = {
toArgs = _opt: path: optionToArgs "tracing.config-file" path;
option = mkOption {
type = with types; nullOr str;
default = if cfg.tracing.config == null then null
else toString (toYAML "tracing.yaml" cfg.tracing.config);
defaultText = ''
if config.services.thanos.<cmd>.tracing.config == null then null
else toString (toYAML "tracing.yaml" config.services.thanos.<cmd>.tracing.config);
'';
description = ''
Path to YAML file that contains tracing configuration.
'';
};
};
tracing.config =
{
toArgs = _opt: _attrs: [];
option = nullOpt types.attrs ''
Tracing configuration.
When not <literal>null</literal> the attribute set gets converted to
a YAML file and stored in the Nix store. The option
<option>tracing.config-file</option> will default to its path.
If <option>tracing.config-file</option> is set this option has no effect.
'';
};
};
common = cfg: params.log // params.tracing cfg // {
http-address = mkParamDef types.str "0.0.0.0:10902" ''
Listen <literal>host:port</literal> for HTTP endpoints.
'';
grpc-address = mkParamDef types.str "0.0.0.0:10901" ''
Listen <literal>ip:port</literal> address for gRPC endpoints (StoreAPI).
Make sure this address is routable from other components.
'';
grpc-server-tls-cert = mkParam types.str ''
TLS Certificate for gRPC server, leave blank to disable TLS
'';
grpc-server-tls-key = mkParam types.str ''
TLS Key for the gRPC server, leave blank to disable TLS
'';
grpc-server-tls-client-ca = mkParam types.str ''
TLS CA to verify clients against.
If no client CA is specified, there is no client verification on server side.
(tls.NoClientCert)
'';
};
objstore = cfg: {
objstore.config-file = {
toArgs = _opt: path: optionToArgs "objstore.config-file" path;
option = mkOption {
type = with types; nullOr str;
default = if cfg.objstore.config == null then null
else toString (toYAML "objstore.yaml" cfg.objstore.config);
defaultText = ''
if config.services.thanos.<cmd>.objstore.config == null then null
else toString (toYAML "objstore.yaml" config.services.thanos.<cmd>.objstore.config);
'';
description = ''
Path to YAML file that contains object store configuration.
'';
};
};
objstore.config =
{
toArgs = _opt: _attrs: [];
option = nullOpt types.attrs ''
Object store configuration.
When not <literal>null</literal> the attribute set gets converted to
a YAML file and stored in the Nix store. The option
<option>objstore.config-file</option> will default to its path.
If <option>objstore.config-file</option> is set this option has no effect.
'';
};
};
sidecar = params.common cfg.sidecar // params.objstore cfg.sidecar // {
prometheus.url = mkParamDef types.str "http://localhost:9090" ''
URL at which to reach Prometheus's API.
For better performance use local network.
'';
tsdb.path = {
toArgs = optionToArgs;
option = mkOption {
type = types.str;
default = "/var/lib/${config.services.prometheus2.stateDir}/data";
defaultText = "/var/lib/\${config.services.prometheus2.stateDir}/data";
description = ''
Data directory of TSDB.
'';
};
};
reloader.config-file = mkParam types.str ''
Config file watched by the reloader.
'';
reloader.config-envsubst-file = mkParam types.str ''
Output file for environment variable substituted config file.
'';
reloader.rule-dirs = mkListParam "reloader.rule-dir" ''
Rule directories for the reloader to refresh.
'';
};
store = params.common cfg.store // params.objstore cfg.store // {
stateDir = mkStateDirParam "data-dir" "thanos-store" ''
Data directory relative to <literal>/var/lib</literal>
in which to cache remote blocks.
'';
index-cache-size = mkParamDef types.str "250MB" ''
Maximum size of items held in the index cache.
'';
chunk-pool-size = mkParamDef types.str "2GB" ''
Maximum size of concurrently allocatable bytes for chunks.
'';
store.grpc.series-sample-limit = mkParamDef types.int 0 ''
Maximum amount of samples returned via a single Series call.
<literal>0</literal> means no limit.
NOTE: for efficiency we take 120 as the number of samples in chunk (it
cannot be bigger than that), so the actual number of samples might be
lower, even though the maximum could be hit.
'';
store.grpc.series-max-concurrency = mkParamDef types.int 20 ''
Maximum number of concurrent Series calls.
'';
sync-block-duration = mkParamDef types.str "3m" ''
Repeat interval for syncing the blocks between local and remote view.
'';
block-sync-concurrency = mkParamDef types.int 20 ''
Number of goroutines to use when syncing blocks from object storage.
'';
};
query = params.common cfg.query // {
grpc-client-tls-secure = mkFlagParam ''
Use TLS when talking to the gRPC server
'';
grpc-client-tls-cert = mkParam types.str ''
TLS Certificates to use to identify this client to the server
'';
grpc-client-tls-key = mkParam types.str ''
TLS Key for the client's certificate
'';
grpc-client-tls-ca = mkParam types.str ''
TLS CA Certificates to use to verify gRPC servers
'';
grpc-client-server-name = mkParam types.str ''
Server name to verify the hostname on the returned gRPC certificates.
See <link xlink:href="https://tools.ietf.org/html/rfc4366#section-3.1"/>
'';
web.route-prefix = mkParam types.str ''
Prefix for API and UI endpoints.
This allows thanos UI to be served on a sub-path. This option is
analogous to <option>web.route-prefix</option> of Prometheus.
'';
web.external-prefix = mkParam types.str ''
Static prefix for all HTML links and redirect URLs in the UI query web
interface.
Actual endpoints are still served on / or the
<option>web.route-prefix</option>. This allows thanos UI to be served
behind a reverse proxy that strips a URL sub-path.
'';
web.prefix-header = mkParam types.str ''
Name of HTTP request header used for dynamic prefixing of UI links and
redirects.
This option is ignored if the option
<literal>web.external-prefix</literal> is set.
Security risk: enable this option only if a reverse proxy in front of
thanos is resetting the header.
The setting <literal>web.prefix-header="X-Forwarded-Prefix"</literal>
can be useful, for example, if Thanos UI is served via Traefik reverse
proxy with <literal>PathPrefixStrip</literal> option enabled, which
sends the stripped prefix value in <literal>X-Forwarded-Prefix</literal>
header. This allows thanos UI to be served on a sub-path.
'';
query.timeout = mkParamDef types.str "2m" ''
Maximum time to process query by query node.
'';
query.max-concurrent = mkParamDef types.int 20 ''
Maximum number of queries processed concurrently by query node.
'';
query.replica-label = mkParam types.str ''
Label to treat as a replica indicator along which data is
deduplicated.
You will still be able to query without deduplication using the
<literal>dedup=false</literal> parameter.
'';
selector-labels = mkAttrsParam "selector-label" ''
Query selector labels that will be exposed in info endpoint.
'';
store.addresses = mkListParam "store" ''
Addresses of statically configured store API servers.
The scheme may be prefixed with <literal>dns+</literal> or
<literal>dnssrv+</literal> to detect store API servers through
respective DNS lookups.
'';
store.sd-files = mkListParam "store.sd-files" ''
Path to files that contain addresses of store API servers. The path
can be a glob pattern.
'';
store.sd-interval = mkParamDef types.str "5m" ''
Refresh interval to re-read file SD files. It is used as a resync fallback.
'';
store.sd-dns-interval = mkParamDef types.str "30s" ''
Interval between DNS resolutions.
'';
store.unhealthy-timeout = mkParamDef types.str "5m" ''
Timeout before an unhealthy store is cleaned from the store UI page.
'';
query.auto-downsampling = mkFlagParam ''
Enable automatic adjustment (step / 5) to what source of data should
be used in store gateways if no
<literal>max_source_resolution</literal> param is specified.
'';
query.partial-response = mkFlagParam ''
Enable partial response for queries if no
<literal>partial_response</literal> param is specified.
'';
query.default-evaluation-interval = mkParamDef types.str "1m" ''
Set default evaluation interval for sub queries.
'';
store.response-timeout = mkParamDef types.str "0ms" ''
If a store doesn't send any data within this duration, it will be
ignored and partial data will be returned if partial responses are
enabled. <literal>0</literal> disables the timeout.
'';
};
rule = params.common cfg.rule // params.objstore cfg.rule // {
labels = mkAttrsParam "label" ''
Labels to be applied to all generated metrics.
Similar to external labels for Prometheus,
used to identify ruler and its blocks as unique source.
'';
stateDir = mkStateDirParam "data-dir" "thanos-rule" ''
Data directory relative to <literal>/var/lib</literal>.
'';
rule-files = mkListParam "rule-file" ''
Rule files that should be used by rule manager. Can be in glob format.
'';
eval-interval = mkParamDef types.str "30s" ''
The default evaluation interval to use.
'';
tsdb.block-duration = mkParamDef types.str "2h" ''
Block duration for TSDB block.
'';
tsdb.retention = mkParamDef types.str "48h" ''
Block retention time on local disk.
'';
alertmanagers.urls = mkListParam "alertmanagers.url" ''
Alertmanager replica URLs to push firing alerts.
Ruler claims success if a push to at least one of the discovered
Alertmanagers succeeds. The scheme may be prefixed with
<literal>dns+</literal> or <literal>dnssrv+</literal> to detect
Alertmanager IPs through respective DNS lookups. The port defaults to
<literal>9093</literal> or the SRV record's value. The URL path is
used as a prefix for the regular Alertmanager API path.
'';
alertmanagers.send-timeout = mkParamDef types.str "10s" ''
Timeout for sending alerts to alertmanager.
'';
alert.query-url = mkParam types.str ''
The external Thanos Query URL that would be set in all alerts 'Source' field.
'';
alert.label-drop = mkListParam "alert.label-drop" ''
Labels by name to drop before sending to alertmanager.
This allows alert to be deduplicated on replica label.
Similar to Prometheus alert relabelling.
'';
web.route-prefix = mkParam types.str ''
Prefix for API and UI endpoints.
This allows thanos UI to be served on a sub-path.
This option is analogous to <literal>--web.route-prefix</literal> of Prometheus.
'';
web.external-prefix = mkParam types.str ''
Static prefix for all HTML links and redirect URLs in the UI query web
interface.
Actual endpoints are still served on / or the
<option>web.route-prefix</option>. This allows thanos UI to be served
behind a reverse proxy that strips a URL sub-path.
'';
web.prefix-header = mkParam types.str ''
Name of HTTP request header used for dynamic prefixing of UI links and
redirects.
This option is ignored if the option
<option>web.external-prefix</option> is set.
Security risk: enable this option only if a reverse proxy in front of
thanos is resetting the header.
The header <literal>X-Forwarded-Prefix</literal> can be useful, for
example, if Thanos UI is served via Traefik reverse proxy with
<literal>PathPrefixStrip</literal> option enabled, which sends the
stripped prefix value in <literal>X-Forwarded-Prefix</literal>
header. This allows thanos UI to be served on a sub-path.
'';
query.addresses = mkListParam "query" ''
Addresses of statically configured query API servers.
The scheme may be prefixed with <literal>dns+</literal> or
<literal>dnssrv+</literal> to detect query API servers through
respective DNS lookups.
'';
query.sd-files = mkListParam "query.sd-files" ''
Path to files that contain addresses of query peers.
The path can be a glob pattern.
'';
query.sd-interval = mkParamDef types.str "5m" ''
Refresh interval to re-read file SD files. (used as a fallback)
'';
query.sd-dns-interval = mkParamDef types.str "30s" ''
Interval between DNS resolutions.
'';
};
compact = params.log // params.tracing cfg.compact // params.objstore cfg.compact // {
http-address = mkParamDef types.str "0.0.0.0:10902" ''
Listen <literal>host:port</literal> for HTTP endpoints.
'';
stateDir = mkStateDirParam "data-dir" "thanos-compact" ''
Data directory relative to <literal>/var/lib</literal>
in which to cache blocks and process compactions.
'';
consistency-delay = mkParamDef types.str "30m" ''
Minimum age of fresh (non-compacted) blocks before they are being
processed. Malformed blocks older than the maximum of consistency-delay
and 30m0s will be removed.
'';
retention.resolution-raw = mkParamDef types.str "0d" ''
How long to retain raw samples in bucket.
<literal>0d</literal> - disables this retention
'';
retention.resolution-5m = mkParamDef types.str "0d" ''
How long to retain samples of resolution 1 (5 minutes) in bucket.
<literal>0d</literal> - disables this retention
'';
retention.resolution-1h = mkParamDef types.str "0d" ''
How long to retain samples of resolution 2 (1 hour) in bucket.
<literal>0d</literal> - disables this retention
'';
startAt = {
toArgs = _opt: startAt: flagToArgs "wait" (startAt == null);
option = nullOpt types.str ''
When this option is set to a <literal>systemd.time</literal>
specification the Thanos compactor will run at the specified period.
When this option is <literal>null</literal> the Thanos compactor service
will run continuously. So it will not exit after all compactions have
been processed but wait for new work.
'';
};
block-sync-concurrency = mkParamDef types.int 20 ''
Number of goroutines to use when syncing block metadata from object storage.
'';
compact.concurrency = mkParamDef types.int 1 ''
Number of goroutines to use when compacting groups.
'';
};
downsample = params.log // params.tracing cfg.downsample // params.objstore cfg.downsample // {
stateDir = mkStateDirParam "data-dir" "thanos-downsample" ''
Data directory relative to <literal>/var/lib</literal>
in which to cache blocks and process downsamplings.
'';
};
receive = params.common cfg.receive // params.objstore cfg.receive // {
remote-write.address = mkParamDef types.str "0.0.0.0:19291" ''
Address to listen on for remote write requests.
'';
stateDir = mkStateDirParam "tsdb.path" "thanos-receive" ''
Data directory relative to <literal>/var/lib</literal> of TSDB.
'';
labels = mkAttrsParam "labels" ''
External labels to announce.
This flag will be removed in the future when handling multiple tsdb
instances is added.
'';
tsdb.retention = mkParamDef types.str "15d" ''
How long to retain raw samples on local storage.
<literal>0d</literal> - disables this retention
'';
};
};
assertRelativeStateDir = cmd: {
assertions = [
{
assertion = !hasPrefix "/" cfg."${cmd}".stateDir;
message =
"The option services.thanos.${cmd}.stateDir should not be an absolute directory." +
" It should be a directory relative to /var/lib.";
}
];
};
in {
options.services.thanos = {
package = mkOption {
type = types.package;
default = pkgs.thanos;
defaultText = "pkgs.thanos";
description = ''
The thanos package that should be used.
'';
};
sidecar = paramsToOptions params.sidecar // {
enable = mkEnableOption
"the Thanos sidecar for Prometheus server";
arguments = mkArgumentsOption "sidecar";
};
store = paramsToOptions params.store // {
enable = mkEnableOption
"the Thanos store node giving access to blocks in a bucket provider.";
arguments = mkArgumentsOption "store";
};
query = paramsToOptions params.query // {
enable = mkEnableOption
("the Thanos query node exposing PromQL enabled Query API " +
"with data retrieved from multiple store nodes");
arguments = mkArgumentsOption "query";
};
rule = paramsToOptions params.rule // {
enable = mkEnableOption
("the Thanos ruler service which evaluates Prometheus rules against" +
" given Query nodes, exposing Store API and storing old blocks in bucket");
arguments = mkArgumentsOption "rule";
};
compact = paramsToOptions params.compact // {
enable = mkEnableOption
"the Thanos compactor which continuously compacts blocks in an object store bucket";
arguments = mkArgumentsOption "compact";
};
downsample = paramsToOptions params.downsample // {
enable = mkEnableOption
"the Thanos downsampler which continuously downsamples blocks in an object store bucket";
arguments = mkArgumentsOption "downsample";
};
receive = paramsToOptions params.receive // {
enable = mkEnableOption
("the Thanos receiver which accept Prometheus remote write API requests " +
"and write to local tsdb (EXPERIMENTAL, this may change drastically without notice)");
arguments = mkArgumentsOption "receive";
};
};
config = mkMerge [
(mkIf cfg.sidecar.enable {
assertions = [
{
assertion = config.services.prometheus2.enable;
message =
"Please enable services.prometheus2 when enabling services.thanos.sidecar.";
}
{
assertion = !(config.services.prometheus2.globalConfig.external_labels == null ||
config.services.prometheus2.globalConfig.external_labels == {});
message =
"services.thanos.sidecar requires uniquely identifying external labels " +
"to be configured in the Prometheus server. " +
"Please set services.prometheus2.globalConfig.external_labels.";
}
];
systemd.services.thanos-sidecar = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" "prometheus2.service" ];
serviceConfig = {
User = "prometheus";
Restart = "always";
ExecStart = thanos "sidecar";
};
};
})
(mkIf cfg.store.enable (mkMerge [
(assertRelativeStateDir "store")
{
systemd.services.thanos-store = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
DynamicUser = true;
StateDirectory = cfg.store.stateDir;
Restart = "always";
ExecStart = thanos "store";
};
};
}
]))
(mkIf cfg.query.enable {
systemd.services.thanos-query = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
DynamicUser = true;
Restart = "always";
ExecStart = thanos "query";
};
};
})
(mkIf cfg.rule.enable (mkMerge [
(assertRelativeStateDir "rule")
{
systemd.services.thanos-rule = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
DynamicUser = true;
StateDirectory = cfg.rule.stateDir;
Restart = "always";
ExecStart = thanos "rule";
};
};
}
]))
(mkIf cfg.compact.enable (mkMerge [
(assertRelativeStateDir "compact")
{
systemd.services.thanos-compact =
let wait = cfg.compact.startAt == null; in {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
Type = if wait then "simple" else "oneshot";
Restart = if wait then "always" else "no";
DynamicUser = true;
StateDirectory = cfg.compact.stateDir;
ExecStart = thanos "compact";
};
} // optionalAttrs (!wait) { inherit (cfg.compact) startAt; };
}
]))
(mkIf cfg.downsample.enable (mkMerge [
(assertRelativeStateDir "downsample")
{
systemd.services.thanos-downsample = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
DynamicUser = true;
StateDirectory = cfg.downsample.stateDir;
Restart = "always";
ExecStart = thanos "downsample";
};
};
}
]))
(mkIf cfg.receive.enable (mkMerge [
(assertRelativeStateDir "receive")
{
systemd.services.thanos-receive = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
DynamicUser = true;
StateDirectory = cfg.receive.stateDir;
Restart = "always";
ExecStart = thanos "receive";
};
};
}
]))
];
}
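A sketch combining a sidecar and a query node with the structured options defined above; the external label and object store settings are placeholders:

{ ... }: {
  services.prometheus2 = {
    enable = true;
    globalConfig.external_labels = { replica = "a"; };  # required by the sidecar assertion
  };
  services.thanos.sidecar = {
    enable = true;
    objstore.config = {
      type = "FILESYSTEM";                              # placeholder object store
      config.directory = "/var/lib/thanos-bucket";
    };
  };
  services.thanos.query = {
    enable = true;
    store.addresses = [ "localhost:10901" ];            # the sidecar's default gRPC address
  };
}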

View file

@ -177,7 +177,7 @@ in {
basePackages = mkOption {
type = types.attrsOf types.package;
default = { inherit (pkgs)
networkmanager modemmanager wpa_supplicant
networkmanager modemmanager wpa_supplicant crda
networkmanager-openvpn networkmanager-vpnc
networkmanager-openconnect networkmanager-fortisslvpn
networkmanager-l2tp networkmanager-iodine; };

View file

@ -372,16 +372,18 @@ in {
systemd.packages = [ pkgs.syncthing ];
users = mkIf (cfg.systemService && cfg.user == defaultUser) {
users."${defaultUser}" =
users.users = mkIf (cfg.systemService && cfg.user == defaultUser) {
"${defaultUser}" =
{ group = cfg.group;
home = cfg.dataDir;
createHome = true;
uid = config.ids.uids.syncthing;
description = "Syncthing daemon user";
};
};
groups."${defaultUser}".gid =
users.groups = mkIf (cfg.systemService && cfg.group == defaultUser) {
"${defaultUser}".gid =
config.ids.gids.syncthing;
};

View file

@ -204,6 +204,7 @@ in {
environment.systemPackages = [ pkgs.wpa_supplicant ];
services.dbus.packages = [ pkgs.wpa_supplicant ];
services.udev.packages = [ pkgs.crda ];
# FIXME: start a separate wpa_supplicant instance per interface.
systemd.services.wpa_supplicant = let

View file

@ -107,8 +107,6 @@ in {
path = with pkgs; [ iptables ipset iproute systemd ];
postStart = ''
${pkgs.ipset}/bin/ipset -quiet create -exist sshguard4 hash:ip family inet
${pkgs.ipset}/bin/ipset -quiet create -exist sshguard6 hash:ip family inet6
${pkgs.iptables}/bin/iptables -I INPUT -m set --match-set sshguard4 src -j DROP
${pkgs.iptables}/bin/ip6tables -I INPUT -m set --match-set sshguard6 src -j DROP
'';

View file

@ -0,0 +1,473 @@
{ config, pkgs, lib, ... }:
let
inherit (lib) mkDefault mkEnableOption mkForce mkIf mkMerge mkOption;
inherit (lib) concatStringsSep literalExample mapAttrsToList optional optionals optionalString types;
cfg = config.services.mediawiki;
fpm = config.services.phpfpm.pools.mediawiki;
user = "mediawiki";
group = config.services.httpd.group;
cacheDir = "/var/cache/mediawiki";
stateDir = "/var/lib/mediawiki";
pkg = pkgs.stdenv.mkDerivation rec {
pname = "mediawiki-full";
version = src.version;
src = cfg.package;
installPhase = ''
mkdir -p $out
cp -r * $out/
rm -rf $out/share/mediawiki/skins/*
rm -rf $out/share/mediawiki/extensions/*
${concatStringsSep "\n" (mapAttrsToList (k: v: ''
ln -s ${v} $out/share/mediawiki/skins/${k}
'') cfg.skins)}
${concatStringsSep "\n" (mapAttrsToList (k: v: ''
ln -s ${v} $out/share/mediawiki/extensions/${k}
'') cfg.extensions)}
'';
};
mediawikiScripts = pkgs.runCommand "mediawiki-scripts" {
buildInputs = [ pkgs.makeWrapper ];
preferLocalBuild = true;
} ''
mkdir -p $out/bin
for i in changePassword.php createAndPromote.php userOptions.php edit.php nukePage.php update.php; do
makeWrapper ${pkgs.php}/bin/php $out/bin/mediawiki-$(basename $i .php) \
--set MEDIAWIKI_CONFIG ${mediawikiConfig} \
--add-flags ${pkg}/share/mediawiki/maintenance/$i
done
'';
mediawikiConfig = pkgs.writeText "LocalSettings.php" ''
<?php
# Protect against web entry
if ( !defined( 'MEDIAWIKI' ) ) {
exit;
}
$wgSitename = "${cfg.name}";
$wgMetaNamespace = false;
## The URL base path to the directory containing the wiki;
## defaults for all runtime URL paths are based off of this.
## For more information on customizing the URLs
## (like /w/index.php/Page_title to /wiki/Page_title) please see:
## https://www.mediawiki.org/wiki/Manual:Short_URL
$wgScriptPath = "";
## The protocol and server name to use in fully-qualified URLs
$wgServer = "${if cfg.virtualHost.enableSSL then "https" else "http"}://${cfg.virtualHost.hostName}";
## The URL path to static resources (images, scripts, etc.)
$wgResourceBasePath = $wgScriptPath;
## The URL path to the logo. Make sure you change this from the default,
## or else you'll overwrite your logo when you upgrade!
$wgLogo = "$wgResourceBasePath/resources/assets/wiki.png";
## UPO means: this is also a user preference option
$wgEnableEmail = true;
$wgEnableUserEmail = true; # UPO
$wgEmergencyContact = "${if cfg.virtualHost.adminAddr != null then cfg.virtualHost.adminAddr else config.services.httpd.adminAddr}";
$wgPasswordSender = $wgEmergencyContact;
$wgEnotifUserTalk = false; # UPO
$wgEnotifWatchlist = false; # UPO
$wgEmailAuthentication = true;
## Database settings
$wgDBtype = "${cfg.database.type}";
$wgDBserver = "${cfg.database.host}:${if cfg.database.socket != null then cfg.database.socket else toString cfg.database.port}";
$wgDBname = "${cfg.database.name}";
$wgDBuser = "${cfg.database.user}";
${optionalString (cfg.database.passwordFile != null) "$wgDBpassword = file_get_contents(\"${cfg.database.passwordFile}\");"}
${optionalString (cfg.database.type == "mysql" && cfg.database.tablePrefix != null) ''
# MySQL specific settings
$wgDBprefix = "${cfg.database.tablePrefix}";
''}
${optionalString (cfg.database.type == "mysql") ''
# MySQL table options to use during installation or update
$wgDBTableOptions = "ENGINE=InnoDB, DEFAULT CHARSET=binary";
''}
## Shared memory settings
$wgMainCacheType = CACHE_NONE;
$wgMemCachedServers = [];
${optionalString (cfg.uploadsDir != null) ''
$wgEnableUploads = true;
$wgUploadDirectory = "${cfg.uploadsDir}";
''}
$wgUseImageMagick = true;
$wgImageMagickConvertCommand = "${pkgs.imagemagick}/bin/convert";
# InstantCommons allows wiki to use images from https://commons.wikimedia.org
$wgUseInstantCommons = false;
# Periodically send a pingback to https://www.mediawiki.org/ with basic data
# about this MediaWiki instance. The Wikimedia Foundation shares this data
# with MediaWiki developers to help guide future development efforts.
$wgPingback = true;
## If you use ImageMagick (or any other shell command) on a
## Linux server, this will need to be set to the name of an
## available UTF-8 locale
$wgShellLocale = "C.UTF-8";
## Set $wgCacheDirectory to a writable directory on the web server
## to make your wiki go slightly faster. The directory should not
## be publicly accessible from the web.
$wgCacheDirectory = "${cacheDir}";
# Site language code, should be one of the list in ./languages/data/Names.php
$wgLanguageCode = "en";
$wgSecretKey = file_get_contents("${stateDir}/secret.key");
# Changing this will log out all existing sessions.
$wgAuthenticationTokenVersion = "";
## For attaching licensing metadata to pages, and displaying an
## appropriate copyright notice / icon. GNU Free Documentation
## License and Creative Commons licenses are supported so far.
$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
$wgRightsUrl = "";
$wgRightsText = "";
$wgRightsIcon = "";
# Path to the GNU diff3 utility. Used for conflict resolution.
$wgDiff = "${pkgs.diffutils}/bin/diff";
$wgDiff3 = "${pkgs.diffutils}/bin/diff3";
# Enabled skins.
${concatStringsSep "\n" (mapAttrsToList (k: v: "wfLoadSkin('${k}');") cfg.skins)}
# Enabled extensions.
${concatStringsSep "\n" (mapAttrsToList (k: v: "wfLoadExtension('${k}');") cfg.extensions)}
# End of automatically generated settings.
# Add more configuration options below.
${cfg.extraConfig}
'';
in
{
# interface
options = {
services.mediawiki = {
enable = mkEnableOption "MediaWiki";
package = mkOption {
type = types.package;
default = pkgs.mediawiki;
description = "Which MediaWiki package to use.";
};
name = mkOption {
default = "MediaWiki";
example = "Foobar Wiki";
description = "Name of the wiki.";
};
uploadsDir = mkOption {
type = types.nullOr types.path;
default = "${stateDir}/uploads";
description = ''
This directory is used for uploads of pictures. The directory passed here is automatically
created and permissions adjusted as required.
'';
};
passwordFile = mkOption {
type = types.path;
description = "A file containing the initial password for the admin user.";
example = "/run/keys/mediawiki-password";
};
skins = mkOption {
default = {};
type = types.attrsOf types.path;
description = ''
Attribute set of paths whose content is linked into the 'skins'
subdirectory of the MediaWiki installation.
'';
};
extensions = mkOption {
default = {};
type = types.attrsOf types.path;
description = ''
Attribute set of paths whose content is linked into the 'extensions'
subdirectory of the MediaWiki installation.
'';
};
database = {
type = mkOption {
type = types.enum [ "mysql" "postgres" "sqlite" "mssql" "oracle" ];
default = "mysql";
description = "Database engine to use. MySQL/MariaDB is the database of choice by MediaWiki developers.";
};
host = mkOption {
type = types.str;
default = "localhost";
description = "Database host address.";
};
port = mkOption {
type = types.port;
default = 3306;
description = "Database host port.";
};
name = mkOption {
type = types.str;
default = "mediawiki";
description = "Database name.";
};
user = mkOption {
type = types.str;
default = "mediawiki";
description = "Database user.";
};
passwordFile = mkOption {
type = types.nullOr types.path;
default = null;
example = "/run/keys/mediawiki-dbpassword";
description = ''
A file containing the password corresponding to
<option>database.user</option>.
'';
};
tablePrefix = mkOption {
type = types.nullOr types.str;
default = null;
description = ''
If you only have access to a single database and wish to install more than
one version of MediaWiki, or have other applications that also use the
database, you can give the table names a unique prefix to stop any naming
conflicts or confusion.
See <link xlink:href='https://www.mediawiki.org/wiki/Manual:$wgDBprefix'/>.
'';
};
socket = mkOption {
type = types.nullOr types.path;
default = if cfg.database.createLocally then "/run/mysqld/mysqld.sock" else null;
defaultText = "/run/mysqld/mysqld.sock";
description = "Path to the unix socket file to use for authentication.";
};
createLocally = mkOption {
type = types.bool;
default = cfg.database.type == "mysql";
defaultText = "true";
description = ''
Create the database and database user locally.
This currently only applies if database type "mysql" is selected.
'';
};
};
virtualHost = mkOption {
type = types.submodule ({
options = import ../web-servers/apache-httpd/per-server-options.nix {
inherit lib;
forMainServer = false;
};
});
example = literalExample ''
{
hostName = "mediawiki.example.org";
enableSSL = true;
adminAddr = "webmaster@example.org";
sslServerCert = "/var/lib/acme/mediawiki.example.org/full.pem";
sslServerKey = "/var/lib/acme/mediawiki.example.org/key.pem";
}
'';
description = ''
Apache configuration can be done by adapting <option>services.httpd.virtualHosts</option>.
See <xref linkend="opt-services.httpd.virtualHosts"/> for further information.
'';
};
poolConfig = mkOption {
type = types.lines;
default = ''
pm = dynamic
pm.max_children = 32
pm.start_servers = 2
pm.min_spare_servers = 2
pm.max_spare_servers = 4
pm.max_requests = 500
'';
description = ''
Options for MediaWiki's PHP pool. See the documentation on <literal>php-fpm.conf</literal>
for details on configuration directives.
'';
};
extraConfig = mkOption {
type = types.lines;
description = ''
Any additional text to be appended to MediaWiki's
LocalSettings.php configuration file. For configuration
settings, see <link xlink:href="https://www.mediawiki.org/wiki/Manual:Configuration_settings"/>.
'';
default = "";
example = ''
$wgEnableEmail = false;
'';
};
};
};
# implementation
config = mkIf cfg.enable {
assertions = [
{ assertion = cfg.database.createLocally -> cfg.database.type == "mysql";
message = "services.mediawiki.createLocally is currently only supported for database type 'mysql'";
}
{ assertion = cfg.database.createLocally -> cfg.database.user == user;
message = "services.mediawiki.database.user must be set to ${user} if services.mediawiki.database.createLocally is set true";
}
{ assertion = cfg.database.createLocally -> cfg.database.socket != null;
message = "services.mediawiki.database.socket must be set if services.mediawiki.database.createLocally is set to true";
}
{ assertion = cfg.database.createLocally -> cfg.database.passwordFile == null;
message = "a password cannot be specified if services.mediawiki.database.createLocally is set to true";
}
];
services.mediawiki.skins = {
MonoBook = "${cfg.package}/share/mediawiki/skins/MonoBook";
Timeless = "${cfg.package}/share/mediawiki/skins/Timeless";
Vector = "${cfg.package}/share/mediawiki/skins/Vector";
};
services.mysql = mkIf cfg.database.createLocally {
enable = true;
package = mkDefault pkgs.mariadb;
ensureDatabases = [ cfg.database.name ];
ensureUsers = [
{ name = cfg.database.user;
ensurePermissions = { "${cfg.database.name}.*" = "ALL PRIVILEGES"; };
}
];
};
services.phpfpm.pools.mediawiki = {
listen = "/run/phpfpm/mediawiki.sock";
extraConfig = ''
listen.owner = ${config.services.httpd.user}
listen.group = ${config.services.httpd.group}
user = ${user}
group = ${group}
env[MEDIAWIKI_CONFIG] = ${mediawikiConfig}
${cfg.poolConfig}
'';
};
services.httpd = {
enable = true;
adminAddr = mkDefault cfg.virtualHost.adminAddr;
extraModules = [ "proxy_fcgi" ];
virtualHosts = [ (mkMerge [
cfg.virtualHost {
documentRoot = mkForce "${pkg}/share/mediawiki";
extraConfig = ''
<Directory "${pkg}/share/mediawiki">
<FilesMatch "\.php$">
<If "-f %{REQUEST_FILENAME}">
SetHandler "proxy:unix:${fpm.listen}|fcgi://localhost/"
</If>
</FilesMatch>
Require all granted
DirectoryIndex index.php
AllowOverride All
</Directory>
'' + optionalString (cfg.uploadsDir != null) ''
Alias "/images" "${cfg.uploadsDir}"
<Directory "${cfg.uploadsDir}">
Require all granted
</Directory>
'';
}
]) ];
};
systemd.tmpfiles.rules = [
"d '${stateDir}' 0750 ${user} ${group} - -"
"d '${cacheDir}' 0750 ${user} ${group} - -"
] ++ optionals (cfg.uploadsDir != null) [
"d '${cfg.uploadsDir}' 0750 ${user} ${group} - -"
"Z '${cfg.uploadsDir}' 0750 ${user} ${group} - -"
];
systemd.services.mediawiki-init = {
wantedBy = [ "multi-user.target" ];
before = [ "phpfpm-mediawiki.service" ];
after = optional cfg.database.createLocally "mysql.service";
script = ''
if ! test -e "${stateDir}/secret.key"; then
tr -dc A-Za-z0-9 </dev/urandom 2>/dev/null | head -c 64 > ${stateDir}/secret.key
fi
echo "exit( wfGetDB( DB_MASTER )->tableExists( 'user' ) ? 1 : 0 );" | \
${pkgs.php}/bin/php ${pkg}/share/mediawiki/maintenance/eval.php --conf ${mediawikiConfig} && \
${pkgs.php}/bin/php ${pkg}/share/mediawiki/maintenance/install.php \
--confpath /tmp \
--scriptpath / \
--dbserver ${cfg.database.host}${optionalString (cfg.database.socket != null) ":${cfg.database.socket}"} \
--dbport ${toString cfg.database.port} \
--dbname ${cfg.database.name} \
${optionalString (cfg.database.tablePrefix != null) "--dbprefix ${cfg.database.tablePrefix}"} \
--dbuser ${cfg.database.user} \
${optionalString (cfg.database.passwordFile != null) "--dbpassfile ${cfg.database.passwordFile}"} \
--passfile ${cfg.passwordFile} \
${cfg.name} \
admin
${pkgs.php}/bin/php ${pkg}/share/mediawiki/maintenance/update.php --conf ${mediawikiConfig} --quick
'';
serviceConfig = {
Type = "oneshot";
User = user;
Group = group;
PrivateTmp = true;
};
};
systemd.services.httpd.after = optional (cfg.database.createLocally && cfg.database.type == "mysql") "mysql.service";
users.users.${user}.group = group;
environment.systemPackages = [ mediawikiScripts ];
};
}
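A sketch of the new services.mediawiki module, mirroring the virtualHost example shown earlier; host name, admin address and password file are placeholders:

{ ... }: {
  services.mediawiki = {
    enable = true;
    name = "Example Wiki";
    passwordFile = "/run/keys/mediawiki-password";
    virtualHost = {
      hostName = "wiki.example.org";
      adminAddr = "webmaster@example.org";
    };
    extraConfig = ''
      $wgEnableEmail = false;
    '';
  };
}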

View file

@ -1,349 +0,0 @@
{ config, lib, pkgs, serverInfo, php, ... }:
with lib;
let
httpd = serverInfo.serverConfig.package;
version24 = !versionOlder httpd.version "2.4";
allGranted = if version24 then ''
Require all granted
'' else ''
Order allow,deny
Allow from all
'';
mediawikiConfig = pkgs.writeText "LocalSettings.php"
''
<?php
# Copied verbatim from the default (generated) LocalSettings.php.
if( defined( 'MW_INSTALL_PATH' ) ) {
$IP = MW_INSTALL_PATH;
} else {
$IP = dirname( __FILE__ );
}
$path = array( $IP, "$IP/includes", "$IP/languages" );
set_include_path( implode( PATH_SEPARATOR, $path ) . PATH_SEPARATOR . get_include_path() );
require_once( "$IP/includes/DefaultSettings.php" );
if ( $wgCommandLineMode ) {
if ( isset( $_SERVER ) && array_key_exists( 'REQUEST_METHOD', $_SERVER ) ) {
die( "This script must be run from the command line\n" );
}
}
$wgScriptPath = "${config.urlPrefix}";
# We probably need to set $wgSecretKey and $wgCacheEpoch.
# Paths to external programs.
$wgDiff3 = "${pkgs.diffutils}/bin/diff3";
$wgDiff = "${pkgs.diffutils}/bin/diff";
$wgImageMagickConvertCommand = "${pkgs.imagemagick.out}/bin/convert";
#$wgDebugLogFile = "/tmp/mediawiki_debug_log.txt";
# Database configuration.
$wgDBtype = "${config.dbType}";
$wgDBserver = "${config.dbServer}";
$wgDBuser = "${config.dbUser}";
$wgDBpassword = "${config.dbPassword}";
$wgDBname = "${config.dbName}";
# E-mail.
$wgEmergencyContact = "${config.emergencyContact}";
$wgPasswordSender = "${config.passwordSender}";
$wgSitename = "${config.siteName}";
${optionalString (config.logo != "") ''
$wgLogo = "${config.logo}";
''}
${optionalString (config.articleUrlPrefix != "") ''
$wgArticlePath = "${config.articleUrlPrefix}/$1";
''}
${optionalString config.enableUploads ''
$wgEnableUploads = true;
$wgUploadDirectory = "${config.uploadDir}";
''}
${optionalString (config.defaultSkin != "") ''
$wgDefaultSkin = "${config.defaultSkin}";
''}
${config.extraConfig}
?>
'';
# Unpack Mediawiki and put the config file in its root directory.
mediawikiRoot = pkgs.stdenv.mkDerivation rec {
name= "mediawiki-1.31.1";
src = pkgs.fetchurl {
url = "https://releases.wikimedia.org/mediawiki/1.31/${name}.tar.gz";
sha256 = "13x48clij21cmysjkpnx68vggchrdasqp7b290j87xlfgjhdhnnf";
};
skins = config.skins;
extensions = config.extensions;
buildPhase =
''
for skin in $skins; do
cp -prvd $skin/* skins/
done
for extension in $extensions; do
cp -prvd $extension/* extensions/
done
''; # */
installPhase =
''
mkdir -p $out
cp -r * $out
cp ${mediawikiConfig} $out/LocalSettings.php
sed -i \
-e 's|/bin/bash|${pkgs.bash}/bin/bash|g' \
-e 's|/usr/bin/timeout|${pkgs.coreutils}/bin/timeout|g' \
$out/includes/shell/limit.sh \
$out/includes/GlobalFunctions.php
'';
};
mediawikiScripts = pkgs.runCommand "mediawiki-${config.id}-scripts" {
buildInputs = [ pkgs.makeWrapper ];
preferLocalBuild = true;
} ''
mkdir -p $out/bin
for i in changePassword.php createAndPromote.php userOptions.php edit.php nukePage.php update.php; do
makeWrapper ${php}/bin/php $out/bin/mediawiki-${config.id}-$(basename $i .php) \
--add-flags ${mediawikiRoot}/maintenance/$i
done
'';
in
{
extraConfig =
''
${optionalString config.enableUploads ''
Alias ${config.urlPrefix}/images ${config.uploadDir}
<Directory ${config.uploadDir}>
${allGranted}
Options -Indexes
</Directory>
''}
${if config.urlPrefix != "" then "Alias ${config.urlPrefix} ${mediawikiRoot}" else ''
RewriteEngine On
RewriteCond %{DOCUMENT_ROOT}%{REQUEST_URI} !-f
RewriteCond %{DOCUMENT_ROOT}%{REQUEST_URI} !-d
${concatMapStringsSep "\n" (u: "RewriteCond %{REQUEST_URI} !^${u.urlPath}") serverInfo.vhostConfig.servedDirs}
${concatMapStringsSep "\n" (u: "RewriteCond %{REQUEST_URI} !^${u.urlPath}") serverInfo.vhostConfig.servedFiles}
RewriteRule ${if config.enableUploads
then "!^/images"
else "^.*\$"
} %{DOCUMENT_ROOT}/${if config.articleUrlPrefix == ""
then ""
else "${config.articleUrlPrefix}/"
}index.php [L]
''}
<Directory ${mediawikiRoot}>
${allGranted}
DirectoryIndex index.php
</Directory>
${optionalString (config.articleUrlPrefix != "") ''
Alias ${config.articleUrlPrefix} ${mediawikiRoot}/index.php
''}
'';
documentRoot = if config.urlPrefix == "" then mediawikiRoot else null;
enablePHP = true;
options = {
id = mkOption {
default = "main";
description = ''
A unique identifier necessary to keep multiple MediaWiki server
instances on the same machine apart. This is used to
disambiguate the administrative scripts, which get names like
mediawiki-$id-change-password.
'';
};
dbType = mkOption {
default = "postgres";
example = "mysql";
description = "Database type.";
};
dbName = mkOption {
default = "mediawiki";
description = "Name of the database that holds the MediaWiki data.";
};
dbServer = mkOption {
default = ""; # use a Unix domain socket
example = "10.0.2.2";
description = ''
The location of the database server. Leave empty to use a
database server running on the same machine through a Unix
domain socket.
'';
};
dbUser = mkOption {
default = "mediawiki";
description = "The user name for accessing the database.";
};
dbPassword = mkOption {
default = "";
example = "foobar";
description = ''
The password of the database user. Warning: this is stored in
cleartext in the Nix store!
'';
};
emergencyContact = mkOption {
default = serverInfo.serverConfig.adminAddr;
example = "admin@example.com";
description = ''
Emergency contact e-mail address. Defaults to the Apache
admin address.
'';
};
passwordSender = mkOption {
default = serverInfo.serverConfig.adminAddr;
example = "password@example.com";
description = ''
E-mail address from which password confirmations originate.
Defaults to the Apache admin address.
'';
};
siteName = mkOption {
default = "MediaWiki";
example = "Foobar Wiki";
description = "Name of the wiki";
};
logo = mkOption {
default = "";
example = "/images/logo.png";
description = "The URL of the site's logo (which should be a 135x135px image).";
};
urlPrefix = mkOption {
default = "/w";
description = ''
The URL prefix under which the Mediawiki service appears.
'';
};
articleUrlPrefix = mkOption {
default = "/wiki";
example = "";
description = ''
The URL prefix under which article pages appear,
e.g. http://server/wiki/Page. Leave empty to use the main URL
prefix, e.g. http://server/w/index.php?title=Page.
'';
};
enableUploads = mkOption {
default = false;
description = "Whether to enable file uploads.";
};
uploadDir = mkOption {
default = throw "You must specify `uploadDir'.";
example = "/data/mediawiki-upload";
description = "The directory that stores uploaded files.";
};
defaultSkin = mkOption {
default = "";
example = "nostalgia";
description = "Set this value to change the default skin used by MediaWiki.";
};
skins = mkOption {
default = [];
type = types.listOf types.path;
description =
''
List of paths whose content is copied to the skins
subdirectory of the MediaWiki installation.
'';
};
extensions = mkOption {
default = [];
type = types.listOf types.path;
description =
''
List of paths whose content is copied to the 'extensions'
subdirectory of the MediaWiki installation.
'';
};
extraConfig = mkOption {
type = types.lines;
default = "";
example =
''
$wgEnableEmail = false;
'';
description = ''
Any additional text to be appended to MediaWiki's
configuration file. This is a PHP script. For configuration
settings, see <link xlink:href='https://www.mediawiki.org/wiki/Manual:Configuration_settings'/>.
'';
};
};
extraPath = [ mediawikiScripts ];
# !!! Need to specify that Apache has a dependency on PostgreSQL!
startupScript = pkgs.writeScript "mediawiki_startup.sh"
# Initialise the database automagically if we're using a Postgres
# server on localhost.
(optionalString (config.dbType == "postgres" && config.dbServer == "") ''
if ! ${pkgs.postgresql}/bin/psql -l | grep -q ' ${config.dbName} ' ; then
${pkgs.postgresql}/bin/createuser --no-superuser --no-createdb --no-createrole "${config.dbUser}" || true
${pkgs.postgresql}/bin/createdb "${config.dbName}" -O "${config.dbUser}"
( echo 'CREATE LANGUAGE plpgsql;'
cat ${mediawikiRoot}/maintenance/postgres/tables.sql
echo 'CREATE TEXT SEARCH CONFIGURATION public.default ( COPY = pg_catalog.english );'
echo COMMIT
) | ${pkgs.postgresql}/bin/psql -U "${config.dbUser}" "${config.dbName}"
fi
${php}/bin/php ${mediawikiRoot}/maintenance/update.php
'');
robotsEntries = optionalString (config.articleUrlPrefix != "")
''
User-agent: *
Disallow: ${config.urlPrefix}/
Disallow: ${config.articleUrlPrefix}/Special:Search
Disallow: ${config.articleUrlPrefix}/Special:Random
'';
}
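
A minimal sketch of the standalone services.mediawiki module that the new nixos/tests/mediawiki.nix further down exercises; the host name, admin address and password are placeholders, and only the options that test sets are shown:

  { pkgs, ... }:
  {
    services.mediawiki.enable = true;
    services.mediawiki.virtualHost.hostName = "wiki.example.org";
    services.mediawiki.virtualHost.adminAddr = "admin@example.org";
    # Any file holding the initial admin password works here.
    services.mediawiki.passwordFile = pkgs.writeText "password" "changeme";
  }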

View file

@ -154,6 +154,7 @@ in {
services.hardware.bolt.enable = mkDefault true;
services.xserver.libinput.enable = mkDefault true; # for controlling touchpad settings via gnome control center
systemd.packages = [ pkgs.gnome3.vino ];
xdg.portal.enable = true;
xdg.portal.extraPortals = [ pkgs.xdg-desktop-portal-gtk ];
# If gnome3 is installed, build vim for gtk3 too.
@ -229,7 +230,7 @@ in {
# Use the correct gnome3 packageSet
networking.networkmanager.basePackages =
{ inherit (pkgs) networkmanager modemmanager wpa_supplicant;
{ inherit (pkgs) networkmanager modemmanager wpa_supplicant crda;
inherit (pkgs.gnome3) networkmanager-openvpn networkmanager-vpnc
networkmanager-openconnect networkmanager-fortisslvpn
networkmanager-iodine networkmanager-l2tp; };
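
The GNOME hunk above and the Plasma 5 hunk below wire up the portal service in the same way, differing only in the backend package (xdg-desktop-portal-gtk versus xdg-desktop-portal-kde). Outside those desktop modules the equivalent standalone configuration is a sketch like:

  { pkgs, ... }:
  {
    xdg.portal.enable = true;
    xdg.portal.extraPortals = [ pkgs.xdg-desktop-portal-gtk ];
  }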

View file

@ -147,7 +147,7 @@ in
networking.networkmanager.enable = mkDefault true;
networking.networkmanager.basePackages =
{ inherit (pkgs) networkmanager modemmanager wpa_supplicant;
{ inherit (pkgs) networkmanager modemmanager wpa_supplicant crda;
inherit (pkgs.gnome3) networkmanager-openvpn networkmanager-vpnc
networkmanager-openconnect networkmanager-fortisslvpn
networkmanager-iodine networkmanager-l2tp; };

View file

@ -170,7 +170,7 @@ in
xdg-user-dirs # Update user dirs as described in https://freedesktop.org/wiki/Software/xdg-user-dirs/
]
# Phonon audio backend
++ lib.optional (cfg.phononBackend == "gstreamer") libsForQt5.phonon-backend-gstreamer
++ lib.optional (cfg.phononBackend == "gstreamer" && cfg.enableQt4Support) pkgs.phonon-backend-gstreamer
@ -233,6 +233,7 @@ in
security.pam.services.sddm.enableKwallet = true;
security.pam.services.slim.enableKwallet = true;
xdg.portal.enable = true;
xdg.portal.extraPortals = [ pkgs.xdg-desktop-portal-kde ];
# Update the start menu for each user that is currently logged in

View file

@ -14,6 +14,9 @@ let
# Alias so people can keep using "virtualbox" instead of "vboxvideo".
virtualbox = { modules = [ xorg.xf86videovboxvideo ]; driverName = "vboxvideo"; };
# Alias so that "radeon" uses the xf86-video-ati driver.
radeon = { modules = [ xorg.xf86videoati ]; driverName = "ati"; };
# modesetting does not have a xf86videomodesetting package as it is included in xorgserver
modesetting = {};
};
@ -241,7 +244,7 @@ in
videoDrivers = mkOption {
type = types.listOf types.str;
# !!! We'd like "nv" here, but it segfaults the X server.
default = [ "ati" "cirrus" "vesa" "vmware" "modesetting" ];
default = [ "radeon" "cirrus" "vesa" "vmware" "modesetting" ];
example = [
"ati_unfree" "amdgpu" "amdgpu-pro"
"nv" "nvidia" "nvidiaLegacy390" "nvidiaLegacy340" "nvidiaLegacy304"

View file

@ -18,6 +18,7 @@ in
boot.initrd.extraUtilsCommands = mkIf inInitrd
''
copy_bin_and_libs ${pkgs.xfsprogs.bin}/bin/fsck.xfs
copy_bin_and_libs ${pkgs.xfsprogs.bin}/bin/xfs_repair
'';
# Trick just to set 'sh' after the extraUtils nuke-refs.

View file

@ -85,6 +85,7 @@ in
flannel = handleTestOn ["x86_64-linux"] ./flannel.nix {};
flatpak = handleTest ./flatpak.nix {};
flatpak-builder = handleTest ./flatpak-builder.nix {};
fluentd = handleTest ./fluentd.nix {};
fsck = handleTest ./fsck.nix {};
fwupd = handleTestOn ["x86_64-linux"] ./fwupd.nix {}; # libsmbios is unsupported on aarch64
gdk-pixbuf = handleTest ./gdk-pixbuf.nix {};
@ -146,6 +147,7 @@ in
mailcatcher = handleTest ./mailcatcher.nix {};
mathics = handleTest ./mathics.nix {};
matrix-synapse = handleTest ./matrix-synapse.nix {};
mediawiki = handleTest ./mediawiki.nix {};
memcached = handleTest ./memcached.nix {};
mesos = handleTest ./mesos.nix {};
miniflux = handleTest ./miniflux.nix {};

View file

@ -9,6 +9,7 @@ import ./make-test.nix ({ pkgs, ... }:
machine = { pkgs, ... }: {
services.flatpak.enable = true;
xdg.portal.enable = true;
environment.systemPackages = with pkgs; [ gnome-desktop-testing flatpak-builder ] ++ flatpak-builder.installedTestsDependencies;
virtualisation.diskSize = 2048;
};

46
nixos/tests/fluentd.nix Normal file
View file

@ -0,0 +1,46 @@
import ./make-test.nix ({ pkgs, lib, ... }: {
name = "fluentd";
machine = { pkgs, ... }: {
services.fluentd = {
enable = true;
config = ''
<source>
@type http
port 9880
</source>
<match **>
type copy
<store>
@type file
format json
path /tmp/fluentd
symlink_path /tmp/current-log
</store>
<store>
@type stdout
</store>
</match>
'';
};
};
testScript = let
testMessage = "an example log message";
payload = pkgs.writeText "test-message.json" (builtins.toJSON {
inherit testMessage;
});
in ''
$machine->start;
$machine->waitForUnit('fluentd.service');
$machine->waitForOpenPort(9880);
$machine->succeed("curl -fsSL -X POST -H 'Content-type: application/json' -d @${payload} http://localhost:9880/test.tag");
$machine->succeed("systemctl stop fluentd"); # blocking flush
$machine->succeed("grep '${testMessage}' /tmp/current-log");
'';
})

19
nixos/tests/mediawiki.nix Normal file
View file

@ -0,0 +1,19 @@
import ./make-test.nix ({ pkgs, lib, ... }: {
name = "mediawiki";
meta.maintainers = [ lib.maintainers.aanderse ];
machine =
{ ... }:
{ services.mediawiki.enable = true;
services.mediawiki.virtualHost.hostName = "localhost";
services.mediawiki.virtualHost.adminAddr = "root@example.com";
services.mediawiki.passwordFile = pkgs.writeText "password" "correcthorsebatterystaple";
};
testScript = ''
startAll;
$machine->waitForUnit('phpfpm-mediawiki.service');
$machine->succeed('curl -L http://localhost/') =~ /MediaWiki has been installed/ or die;
'';
})

View file

@ -1,9 +1,44 @@
import ./make-test.nix {
let
grpcPort = 19090;
queryPort = 9090;
minioPort = 9000;
pushgwPort = 9091;
s3 = {
accessKey = "BKIKJAA5BMMU2RHO6IBB";
secretKey = "V7f1CwQqAcwo80UEIJEjc5gVQUSSx5ohQ9GSrr12";
};
objstore.config = {
type = "S3";
config = {
bucket = "thanos-bucket";
endpoint = "s3:${toString minioPort}";
region = "us-east-1";
access_key = s3.accessKey;
secret_key = s3.secretKey;
insecure = true;
signature_version2 = false;
encrypt_sse = false;
put_user_metadata = {};
http_config = {
idle_conn_timeout = "0s";
insecure_skip_verify = false;
};
trace = {
enable = false;
};
};
};
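  # This shared S3 client configuration is inherited by the Thanos sidecar,
  # store and compact services below; the store node's generated
  # objstore.config-file is also what the final "thanos bucket ls" check reads.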
in import ./make-test.nix {
name = "prometheus-2";
nodes = {
one = { pkgs, ... }: {
prometheus = { pkgs, ... }: {
virtualisation.diskSize = 2 * 1024;
environment.systemPackages = [ pkgs.jq ];
networking.firewall.allowedTCPPorts = [ grpcPort ];
services.prometheus2 = {
enable = true;
scrapeConfigs = [
@ -11,7 +46,7 @@ import ./make-test.nix {
job_name = "prometheus";
static_configs = [
{
targets = [ "127.0.0.1:9090" ];
targets = [ "127.0.0.1:${toString queryPort}" ];
labels = { instance = "localhost"; };
}
];
@ -21,7 +56,7 @@ import ./make-test.nix {
scrape_interval = "1s";
static_configs = [
{
targets = [ "127.0.0.1:9091" ];
targets = [ "127.0.0.1:${toString pushgwPort}" ];
}
];
}
@ -35,33 +70,170 @@ import ./make-test.nix {
expr: count(up{job="prometheus"})
''
];
globalConfig = {
external_labels = {
some_label = "required by thanos";
};
};
extraFlags = [
# Required by thanos
"--storage.tsdb.min-block-duration=5s"
"--storage.tsdb.max-block-duration=5s"
];
};
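      # The pushgateway is set up to persist its metrics so that the
      # /var/lib/prometheus-pushgateway/metrics check in the test script
      # below has something to verify.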
services.prometheus.pushgateway = {
enable = true;
web.listen-address = ":${toString pushgwPort}";
persistMetrics = true;
persistence.interval = "1s";
stateDir = "prometheus-pushgateway";
};
services.thanos = {
sidecar = {
enable = true;
grpc-address = "0.0.0.0:${toString grpcPort}";
inherit objstore;
};
# TODO: Add some tests for these services:
#rule = {
# enable = true;
# http-address = "0.0.0.0:19194";
# grpc-address = "0.0.0.0:19193";
# query.addresses = [
# "localhost:19191"
# ];
# labels = {
# just = "some";
# nice = "labels";
# };
#};
#
#receive = {
# http-address = "0.0.0.0:19195";
# enable = true;
# labels = {
# just = "some";
# nice = "labels";
# };
#};
};
};
query = { pkgs, ... }: {
environment.systemPackages = [ pkgs.jq ];
services.thanos.query = {
enable = true;
http-address = "0.0.0.0:${toString queryPort}";
store.addresses = [
"prometheus:${toString grpcPort}"
];
};
};
store = { pkgs, ... }: {
virtualisation.diskSize = 2 * 1024;
environment.systemPackages = with pkgs; [ jq thanos ];
services.thanos.store = {
enable = true;
http-address = "0.0.0.0:10902";
grpc-address = "0.0.0.0:${toString grpcPort}";
inherit objstore;
sync-block-duration = "1s";
};
services.thanos.compact = {
enable = true;
http-address = "0.0.0.0:10903";
inherit objstore;
consistency-delay = "5s";
};
services.thanos.query = {
enable = true;
http-address = "0.0.0.0:${toString queryPort}";
store.addresses = [
"localhost:${toString grpcPort}"
];
};
};
s3 = { pkgs, ... } : {
# Minio requires at least 1GiB of free disk space to run.
virtualisation.diskSize = 2 * 1024;
networking.firewall.allowedTCPPorts = [ minioPort ];
services.minio = {
enable = true;
inherit (s3) accessKey secretKey;
};
environment.systemPackages = [ pkgs.minio-client ];
};
};
testScript = ''
startAll;
$one->waitForUnit("prometheus2.service");
$one->waitForOpenPort(9090);
$one->succeed("curl -s http://127.0.0.1:9090/metrics");
testScript = { nodes, ... } : ''
# Before starting the other machines we first make sure that our S3 service is online
# and has a bucket added for thanos:
$s3->start;
$s3->waitForUnit("minio.service");
$s3->waitForOpenPort(${toString minioPort});
$s3->succeed(
"mc config host add minio " .
"http://localhost:${toString minioPort} ${s3.accessKey} ${s3.secretKey} S3v4");
$s3->succeed("mc mb minio/thanos-bucket");
# Let's test if pushing a metric to the pushgateway succeeds
# and whether that metric gets ingested by prometheus.
$one->waitForUnit("pushgateway.service");
$one->succeed(
# Now that s3 has started we can start the other machines:
$prometheus->start;
$query->start;
$store->start;
# Check if prometheus responds to requests:
$prometheus->waitForUnit("prometheus2.service");
$prometheus->waitForOpenPort(${toString queryPort});
$prometheus->succeed("curl -s http://127.0.0.1:${toString queryPort}/metrics");
# Let's test if pushing a metric to the pushgateway succeeds:
$prometheus->waitForUnit("pushgateway.service");
$prometheus->succeed(
"echo 'some_metric 3.14' | " .
"curl --data-binary \@- http://127.0.0.1:9091/metrics/job/some_job");
$one->waitUntilSucceeds(
"curl -sf 'http://127.0.0.1:9090/api/v1/query?query=some_metric' " .
"| jq '.data.result[0].value[1]' | grep '\"3.14\"'");
"curl --data-binary \@- http://127.0.0.1:${toString pushgwPort}/metrics/job/some_job");
# Now check whether that metric gets ingested by prometheus.
# Since we'll check for the metric several times on different machines
# we abstract the test using the following function:
# Function to check whether the metric "some_metric" has been received and has the correct value.
local *Machine::waitForMetric = sub {
my ($self) = @_;
$self->waitUntilSucceeds(
"curl -sf 'http://127.0.0.1:${toString queryPort}/api/v1/query?query=some_metric' " .
"| jq '.data.result[0].value[1]' | grep '\"3.14\"'");
};
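      # ("local *Machine::waitForMetric" installs the helper as a method on every
      # test machine, so the prometheus, query and store nodes can all call it.)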
$prometheus->waitForMetric;
# Let's test if the pushgateway persists metrics to the configured location.
$one->waitUntilSucceeds("test -e /var/lib/prometheus-pushgateway/metrics");
$prometheus->waitUntilSucceeds("test -e /var/lib/prometheus-pushgateway/metrics");
# Test thanos
$prometheus->waitForUnit("thanos-sidecar.service");
# Test if the Thanos query service can correctly retrieve the metric that was send above.
$query->waitForUnit("thanos-query.service");
$query->waitForMetric;
# Test if the Thanos sidecar has correctly uploaded its TSDB to S3, if the
# Thanos storage service has correctly downloaded it from S3 and if the Thanos
# query service running on $store can correctly retrieve the metric:
$store->waitForUnit("thanos-store.service");
$store->waitForMetric;
$store->waitForUnit("thanos-compact.service");
# Test if the Thanos bucket command is able to retrieve blocks from the S3 bucket
# and check if the blocks have the correct labels:
$store->succeed(
"thanos bucket ls" .
" --objstore.config-file=${nodes.store.config.services.thanos.store.objstore.config-file}" .
" --output=json | jq .thanos.labels.some_label | grep 'required by thanos'");
'';
}
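
Condensed from the node definitions above, a sketch of how the new services.thanos.* options fit together outside the test harness; ports, endpoint and credentials are placeholders, only a subset of the S3 fields from the test is shown, and the Prometheus side still needs the external_labels and tsdb block-duration settings used on the prometheus node:

  { ... }:
  {
    services.thanos.sidecar = {
      enable = true;
      grpc-address = "0.0.0.0:19090";
      objstore.config = {
        type = "S3";
        config = {
          bucket = "thanos-bucket";
          endpoint = "s3.example.org:9000";
          access_key = "...";
          secret_key = "...";
        };
      };
    };
    services.thanos.query = {
      enable = true;
      http-address = "0.0.0.0:9090";
      store.addresses = [ "localhost:19090" ];
    };
  }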

View file

@ -188,6 +188,48 @@ let
'';
};
mail = {
exporterConfig = {
enable = true;
user = "mailexporter";
configuration = {
monitoringInterval = "2s";
mailCheckTimeout = "10s";
servers = [ {
name = "testserver";
server = "localhost";
port = 25;
from = "mailexporter@localhost";
to = "mailexporter@localhost";
detectionDir = "/var/spool/mail/mailexporter/new";
} ];
};
};
metricProvider = {
services.postfix.enable = true;
systemd.services.prometheus-mail-exporter = {
after = [ "postfix.service" ];
requires = [ "postfix.service" ];
preStart = ''
mkdir -p -m 0600 mailexporter/new
'';
serviceConfig = {
ProtectHome = true;
ReadOnlyPaths = "/";
ReadWritePaths = "/var/spool/mail";
WorkingDirectory = "/var/spool/mail";
};
};
users.users.mailexporter.isSystemUser = true;
};
exporterTest = ''
waitForUnit("postfix.service")
waitForUnit("prometheus-mail-exporter.service")
waitForOpenPort(9225)
waitUntilSucceeds("curl -sSf http://localhost:9225/metrics | grep -q 'mail_deliver_success{configname=\"testserver\"} 1'")
'';
};
nginx = {
exporterConfig = {
enable = true;

View file

@ -0,0 +1,38 @@
{ stdenv
, fetchgit
, rustPlatform
, openssl
, pkgconfig
, protobuf
, rustup
}:
rustPlatform.buildRustPackage rec {
pname = "jormungandr";
version = "0.3.1";
src = fetchgit {
url = "https://github.com/input-output-hk/${pname}";
rev = "v${version}";
sha256 = "0ys8sw73c7binxnl79dqi7sxva62bgifbhgyzvvjvmjjdxgq4kfp";
fetchSubmodules = true;
};
cargoSha256 = "0fphjzz78ym15qbka01idnq6vkyf4asrnhrhvxngwc3bifmnj937";
nativeBuildInputs = [ pkgconfig protobuf rustup ];
buildInputs = [ openssl ];
PROTOC = "${protobuf}/bin/protoc";
# Disabling integration tests
doCheck = false;
meta = with stdenv.lib; {
description = "An aspiring blockchain node";
homepage = "https://input-output-hk.github.io/jormungandr/";
license = licenses.mit;
maintainers = [ maintainers.mmahut ];
platforms = platforms.all;
};
}
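
How the derivation above is exposed in the top-level package set is outside this hunk; assuming it is wired up as pkgs.jormungandr, pulling it into a system profile is the usual one-liner:

  # Sketch; assumes the attribute name pkgs.jormungandr.
  { pkgs, ... }:
  {
    environment.systemPackages = [ pkgs.jormungandr ];
  }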

View file

@ -3,11 +3,11 @@
bitwig-studio1.overrideAttrs (oldAttrs: rec {
name = "bitwig-studio-${version}";
version = "3.0";
version = "3.0.1";
src = fetchurl {
url = "https://downloads.bitwig.com/stable/${version}/bitwig-studio-${version}.deb";
sha256 = "0p7wi1srfzalb0rl94vqppfbnxdfwqzgg5blkdwkf4sx977aihpv";
sha256 = "0k25p1j4kgnhm7p90qp1cz79xddgi6nh1nx1y5wz42x8qrpxya0s";
};
runtimeDependencies = [

View file

@ -1,58 +0,0 @@
diff --git a/src/ugen_osc.cpp b/src/ugen_osc.cpp
index 6b93c6b..dbefe4f 100644
--- a/src/ugen_osc.cpp
+++ b/src/ugen_osc.cpp
@@ -1232,7 +1232,7 @@ CK_DLL_CTRL( gen5_coeffs )
Chuck_Array8 * in_args = (Chuck_Array8 *)GET_CK_OBJECT(ARGS);
// fprintf(stdout, "calling gen10coeffs, %d\n", weights);
- if(in_args<0) return;
+ if(in_args!=0) return;
size = in_args->size();
if(size >= genX_MAX_COEFFS) size = genX_MAX_COEFFS - 1;
@@ -1287,7 +1287,7 @@ CK_DLL_CTRL( gen7_coeffs )
Chuck_Array8 * in_args = (Chuck_Array8 *)GET_CK_OBJECT(ARGS);
// fprintf(stdout, "calling gen10coeffs, %d\n", weights);
- if(in_args<0) return;
+ if(in_args!=0) return;
size = in_args->size();
if(size >= genX_MAX_COEFFS) size = genX_MAX_COEFFS - 1;
@@ -1340,7 +1340,7 @@ CK_DLL_CTRL( gen9_coeffs )
Chuck_Array8 * weights = (Chuck_Array8 *)GET_CK_OBJECT(ARGS);
// fprintf(stdout, "calling gen10coeffs, %d\n", weights);
- if(weights<0) return;
+ if(weights!=0) return;
size = weights->size();
if(size >= genX_MAX_COEFFS) size = genX_MAX_COEFFS - 1;
@@ -1390,7 +1390,7 @@ CK_DLL_CTRL( gen10_coeffs )
Chuck_Array8 * weights = (Chuck_Array8 *)GET_CK_OBJECT(ARGS);
// fprintf(stdout, "calling gen10coeffs, %d\n", weights);
- if(weights<0) return;
+ if(weights!=0) return;
size = weights->size();
if(size >= genX_MAX_COEFFS) size = genX_MAX_COEFFS - 1;
@@ -1441,7 +1441,7 @@ CK_DLL_CTRL( gen17_coeffs )
Chuck_Array8 * weights = (Chuck_Array8 *)GET_CK_OBJECT(ARGS);
// fprintf(stdout, "calling gen17coeffs, %d\n", weights);
- if(weights<0) return;
+ if(weights!=0) return;
size = weights->size();
if(size >= genX_MAX_COEFFS) size = genX_MAX_COEFFS - 1;
@@ -1502,7 +1502,7 @@ CK_DLL_CTRL( curve_coeffs )
Chuck_Array8 * weights = (Chuck_Array8 *)GET_CK_OBJECT(ARGS);
// fprintf(stdout, "calling gen17coeffs, %d\n", weights);
- if(weights<0) goto done;
+ if(weights!=0) goto done;
nargs = weights->size();
if (nargs < 5 || (nargs % 3) != 2) { // check number of args

View file

@ -1,5 +1,5 @@
--- a/src/util_string.cpp 2014-10-27 22:52:11.875981552 +0100
+++ b/src/util_string.cpp 2014-10-27 22:54:18.613001994 +0100
--- a/src/core/util_string.cpp 2014-10-27 22:52:11.875981552 +0100
+++ b/src/core/util_string.cpp 2014-10-27 22:54:18.613001994 +0100
@@ -40,6 +40,10 @@
#include <linux/limits.h>
#endif // __PLATFORM_LINUX__

View file

@ -3,12 +3,12 @@
}:
stdenv.mkDerivation rec {
version = "1.3.5.2";
version = "1.4.0.0";
name = "chuck-${version}";
src = fetchurl {
url = "http://chuck.cs.princeton.edu/release/files/chuck-${version}.tgz";
sha256 = "02z7sglax3j09grj5s1skmw8z6wz7b21hjrm95nrrdpwbxabh079";
sha256 = "1b17rsf7bv45gfhyhfmpz9d4rkxn24c0m2hgmpfjz3nlp0rf7bic";
};
nativeBuildInputs = [ flex bison which ];
@ -17,16 +17,15 @@ stdenv.mkDerivation rec {
++ lib.optional (!stdenv.isDarwin) alsaLib
++ lib.optional stdenv.isDarwin [ AppKit Carbon CoreAudio CoreMIDI CoreServices Kernel ];
patches = [ ./clang.patch ./darwin-limits.patch ];
patches = [ ./darwin-limits.patch ];
NIX_CFLAGS_COMPILE = lib.optional stdenv.isDarwin "-Wno-missing-sysroot";
NIX_LDFLAGS = lib.optional stdenv.isDarwin "-framework MultitouchSupport";
postPatch = ''
substituteInPlace src/makefile --replace "/usr/bin" "$out/bin"
substituteInPlace src/makefile.osx \
substituteInPlace src/core/makefile.x/makefile.osx \
--replace "weak_framework" "framework" \
--replace "MACOSX_DEPLOYMENT_TARGET=10.5" "MACOSX_DEPLOYMENT_TARGET=$MACOSX_DEPLOYMENT_TARGET"
--replace "MACOSX_DEPLOYMENT_TARGET=10.9" "MACOSX_DEPLOYMENT_TARGET=$MACOSX_DEPLOYMENT_TARGET"
'';
makeFlags = [ "-C src" "DESTDIR=$(out)/bin" ];
@ -36,7 +35,7 @@ stdenv.mkDerivation rec {
description = "Programming language for real-time sound synthesis and music creation";
homepage = http://chuck.cs.princeton.edu;
license = licenses.gpl2;
platforms = with platforms; linux ++ darwin;
platforms = platforms.unix;
maintainers = with maintainers; [ ftrvxmtrx ];
};
}

View file

@ -4,13 +4,13 @@
pythonPackages.buildPythonApplication rec {
pname = "mopidy";
version = "2.2.2";
version = "2.2.3";
src = fetchFromGitHub {
owner = "mopidy";
repo = "mopidy";
rev = "v${version}";
sha256 = "01vl162c7ssf69b0m65ys9fxnsqnfa1whwbprnc063lkcnrnlkr1";
sha256 = "0i9rpnlmgrnkgmr9hyx9sky9gzj2cjhay84a0yaijwcb9nmr8nnc";
};
nativeBuildInputs = [ wrapGAppsHook ];

View file

@ -2,11 +2,11 @@
pythonPackages.buildPythonApplication rec {
pname = "Mopidy-Iris";
version = "3.38.0";
version = "3.39.0";
src = pythonPackages.fetchPypi {
inherit pname version;
sha256 = "0w86g037jdihh6a16x7y82qk8yk30frkj23k9axcj9fjyp30r0x5";
sha256 = "1d2g66gvm7yaz4nbxlh23lj2xfkhi3hsg2k646m1za510f8dzlag";
};
propagatedBuildInputs = [

View file

@ -1,7 +1,7 @@
{ fetchurl, stdenv, squashfsTools, xorg, alsaLib, makeWrapper, openssl, freetype
, glib, pango, cairo, atk, gdk-pixbuf, gtk2, cups, nspr, nss, libpng, libnotify
, libgcrypt, systemd, fontconfig, dbus, expat, ffmpeg_3, curl, zlib, gnome3
, at-spi2-atk
, at-spi2-atk, at-spi2-core, apulse
}:
let
@ -10,20 +10,22 @@ let
# If an update breaks things, one of those might have valuable info:
# https://aur.archlinux.org/packages/spotify/
# https://community.spotify.com/t5/Desktop-Linux
version = "1.0.96.181.gf6bc1b6b-12";
version = "1.1.10.546.ge08ef575-19";
# To get the latest stable revision:
# curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/spotify?channel=stable' | jq '.download_url,.version,.last_updated'
# To get general information:
# curl -H 'Snap-Device-Series: 16' 'https://api.snapcraft.io/v2/snaps/info/spotify' | jq '.'
# More examples of api usage:
# https://github.com/canonical-websites/snapcraft.io/blob/master/webapp/publisher/snaps/views.py
rev = "30";
rev = "36";
deps = [
alsaLib
apulse
atk
at-spi2-atk
at-spi2-core
cairo
cups
curl
@ -72,7 +74,7 @@ stdenv.mkDerivation {
# https://community.spotify.com/t5/Desktop-Linux/Redistribute-Spotify-on-Linux-Distributions/td-p/1695334
src = fetchurl {
url = "https://api.snapcraft.io/api/v1/snaps/download/pOBIoZ2LrCB3rDohMxoYGnbN14EHOgD7_${rev}.snap";
sha512 = "859730fbc80067f0828f7e13eee9a21b13b749f897a50e17c2da4ee672785cfd79e1af6336e609529d105e040dc40f61b6189524783ac93d49f991c4ea8b3c56";
sha512 = "c49f1a86a9b737e64a475bbe62754a36f607669e908eb725a2395f0a0a6b95968e0c8ce27ab2c8b6c92fe8cbacb1ef58de11c79b92dc0f58c2c6d3a140706a1f";
};
buildInputs = [ squashfsTools makeWrapper ];
@ -134,6 +136,8 @@ stdenv.mkDerivation {
librarypath="${stdenv.lib.makeLibraryPath deps}:$libdir"
wrapProgram $out/share/spotify/spotify \
--prefix LD_LIBRARY_PATH : "$librarypath" \
--prefix LD_LIBRARY_PATH : "${apulse}/lib/apulse" \
--set APULSE_PLAYBACK_DEVICE plug:dmix \
--prefix PATH : "${gnome3.zenity}/bin"
# fix Icon line in the desktop file (#48062)
@ -158,7 +162,7 @@ stdenv.mkDerivation {
homepage = https://www.spotify.com/;
description = "Play music from the Spotify music service";
license = licenses.unfree;
maintainers = with maintainers; [ eelco ftrvxmtrx sheenobu mudri timokau ];
maintainers = with maintainers; [ eelco ftrvxmtrx sheenobu mudri timokau angristan ];
platforms = [ "x86_64-linux" ];
};
}

View file

@ -32,13 +32,13 @@ let
in stdenv.mkDerivation rec {
name = "emacs-libvterm-${version}";
version = "unstable-2019-04-28";
version = "unstable-2019-07-22";
src = fetchFromGitHub {
owner = "akermu";
repo = "emacs-libvterm";
rev = "6adcedf3e4aaadeeaff97437044fba17aeb466d4";
sha256 = "1j6qr5bmajig3idhwsaa3zm72w13q9zn77z2dlrhhx3p4bbds3f8";
rev = "301fe9fdfd5fb2496c8428a11e0812fd8a4c0820";
sha256 = "0i1hn5gcxayqcbjrnpgczvbicq2vsyn59646ary3crs0mz9wlbpr";
};
nativeBuildInputs = [ cmake ];

View file

@ -233,6 +233,12 @@ self:
# upstream issue: missing file header
textmate = markBroken super.textmate;
treemacs-magit = super.treemacs-magit.overrideAttrs (attrs: {
# searches for Git at build time
nativeBuildInputs =
(attrs.nativeBuildInputs or []) ++ [ external.git ];
});
# missing OCaml
utop = markBroken super.utop;

View file

@ -1,9 +1,8 @@
{ stdenv, fetchFromGitHub, qmake, pkgconfig, qttools, qtwebengine, hunspell }:
{ stdenv, mkDerivation, fetchFromGitHub, qmake, pkgconfig, qttools, qtwebengine, hunspell }:
stdenv.mkDerivation rec {
mkDerivation rec {
pname = "ghostwriter";
version = "1.8.0";
name = "${pname}-${version}";
src = fetchFromGitHub {
owner = "wereturtle";

View file

@ -5,11 +5,11 @@
stdenv.mkDerivation rec {
name = "avocode-${version}";
version = "3.8.1";
version = "3.9.0";
src = fetchurl {
url = "https://media.avocode.com/download/avocode-app/${version}/avocode-${version}-linux.zip";
sha256 = "1akrrnv0ajzvbhflbpmh4ckcqfqrgdjqfp6d4jqvspqi56zmsr83";
sha256 = "0fk62farnsxz59q82kxagibxmn9p9ckp6ix0wqg297gvasgad31q";
};
libPath = stdenv.lib.makeLibraryPath (with xorg; [

View file

@ -3,13 +3,13 @@
stdenv.mkDerivation rec {
name = "goxel-${version}";
version = "0.9.0";
version = "0.10.0";
src = fetchFromGitHub {
owner = "guillaumechereau";
repo = "goxel";
rev = "v${version}";
sha256 = "1vd1vw5pplm4ig9f5gwnbvndnag1h7j0jj0cnj78gpiv96qak2vw";
sha256 = "1mdw4bs7hvfn0yngd9ial5wzlfkcbhr3wzldb1w7s3s48agixkdr";
};
patches = [ ./disable-imgui_ini.patch ];

View file

@ -1,10 +1,33 @@
{ stdenv, fetchFromGitHub, qt5, libsForQt5
, bison, flex, eigen, boost, libGLU_combined, glew, opencsg, cgal
, mpfr, gmp, glib, pkgconfig, harfbuzz, gettext, freetype, fontconfig
, double-conversion, lib3mf, libzip
{ stdenv
, fetchFromGitHub
, qtbase
, qtmultimedia
, qscintilla
, bison
, flex
, eigen
, boost
, libGLU_combined
, glew
, opencsg
, cgal
, mpfr
, gmp
, glib
, pkgconfig
, harfbuzz
, gettext
, freetype
, fontconfig
, double-conversion
, lib3mf
, libzip
, mkDerivation
, qtmacextras
, qmake
}:
stdenv.mkDerivation rec {
mkDerivation rec {
pname = "openscad";
version = "2019.05";
@ -15,14 +38,14 @@ stdenv.mkDerivation rec {
sha256 = "1qz384jqgk75zxk7sqd22ma9pyd94kh4h6a207ldx7p9rny6vc5l";
};
nativeBuildInputs = [ bison flex pkgconfig gettext qt5.qmake ];
nativeBuildInputs = [ bison flex pkgconfig gettext qmake ];
buildInputs = [
eigen boost glew opencsg cgal mpfr gmp glib
harfbuzz lib3mf libzip double-conversion freetype fontconfig
qtbase qtmultimedia qscintilla
] ++ stdenv.lib.optional stdenv.isLinux libGLU_combined
++ (with qt5; [qtbase qtmultimedia] ++ stdenv.lib.optional stdenv.isDarwin qtmacextras)
++ (with libsForQt5; [qscintilla])
++ stdenv.lib.optional stdenv.isDarwin qtmacextras
;
qmakeFlags = [ "VERSION=${version}" ];

View file

@ -1,7 +1,7 @@
{ stdenv, fetchurl, poppler_utils, pkgconfig, libpng
, imagemagick, libjpeg, fontconfig, podofo, qtbase, qmake, icu, sqlite
, makeWrapper, unrarSupport ? false, chmlib, python2Packages, libusb1, libmtp
, xdg_utils, makeDesktopItem, wrapGAppsHook, removeReferencesTo
, xdg_utils, makeDesktopItem, wrapGAppsHook, removeReferencesTo, qt5
}:
stdenv.mkDerivation rec {
@ -35,7 +35,7 @@ stdenv.mkDerivation rec {
enableParallelBuilding = true;
nativeBuildInputs = [ makeWrapper pkgconfig qmake removeReferencesTo ];
nativeBuildInputs = [ makeWrapper pkgconfig qmake removeReferencesTo qt5.wrapQtAppsHook ];
buildInputs = [
poppler_utils libpng imagemagick libjpeg
@ -48,6 +48,11 @@ stdenv.mkDerivation rec {
odfpy
]);
qtWrapperArgs = [
"--prefix PYTHONPATH: $PYTHONPATH"
"--prefix PATH: ${poppler_utils.out}/bin}"
];
installPhase = ''
runHook preInstall
@ -70,9 +75,8 @@ stdenv.mkDerivation rec {
sed -i "s/env python[0-9.]*/python/" $PYFILES
sed -i "2i import sys; sys.argv[0] = 'calibre'" $out/bin/calibre
for a in $out/bin/*; do
wrapProgram $a --prefix PYTHONPATH : $PYTHONPATH \
--prefix PATH : ${poppler_utils.out}/bin
for program in $out/bin/*; do
wrapQtApp $program
done
# Replace @out@ by the output path.
@ -95,6 +99,10 @@ stdenv.mkDerivation rec {
remove-references-to -t ${podofo.dev} $out/lib/calibre/calibre/plugins/podofo.so
'';
postFixup = ''
'';
disallowedReferences = [ podofo.dev ];
calibreDesktopItem = makeDesktopItem {

View file

@ -1,6 +1,6 @@
{ clipnotify, makeWrapper, xsel, dmenu2, utillinux, gawk, stdenv, fetchFromGitHub, lib }:
{ clipnotify, makeWrapper, xsel, dmenu, utillinux, gawk, stdenv, fetchFromGitHub, lib }:
let
runtimePath = lib.makeBinPath [ clipnotify xsel dmenu2 utillinux gawk ];
runtimePath = lib.makeBinPath [ clipnotify xsel dmenu utillinux gawk ];
in
stdenv.mkDerivation rec {
name = "clipmenu-${version}";

View file

@ -1,29 +0,0 @@
{stdenv, fetchhg, libX11, libXinerama, libXft, zlib}:
with stdenv.lib;
stdenv.mkDerivation rec {
name = "dmenu2-0.3pre-2014-07-08";
src = fetchhg {
url = "https://bitbucket.org/melek/dmenu2";
rev = "36cb94a16edf928bdaaa636123392517ed469be0";
sha256 = "1b17z5ypg6ij7zz3ncp3irc87raccna10y4w490c872a99lp23lv";
};
buildInputs = [ libX11 libXinerama zlib libXft ];
postPatch = ''
sed -ri -e 's!\<(dmenu|stest)\>!'"$out/bin"'/&!g' dmenu_run
'';
preConfigure = [ ''sed -i "s@PREFIX = /usr/local@PREFIX = $out@g" config.mk'' ];
meta = {
description = "A patched fork of the original dmenu - an efficient dynamic menu for X";
homepage = https://bitbucket.org/melek/dmenu2;
license = licenses.mit;
maintainers = [ maintainers.cstrahan ];
platforms = platforms.all;
};
}

View file

@ -15,10 +15,10 @@ stdenv.mkDerivation rec {
};
patchFlags = [ "-p0" ];
configureFlags = [ "--disable-database-updates" ];
nativeBuildInputs = [
intltool pkgconfig
shared-mime-info # For update-mime-database
desktop-file-utils # For update-desktop-database
wrapGAppsHook # Fix error: GLib-GIO-ERROR **: No GSettings schemas are installed on the system
];
buildInputs = [ gdl libchamplain gnome3.adwaita-icon-theme libxml2 ];

View file

@ -1,17 +1,17 @@
{ stdenv, fetchFromGitHub, qmake, qttools, makeWrapper }:
{ mkDerivation, lib, fetchFromGitHub, qmake, qttools }:
stdenv.mkDerivation rec {
mkDerivation rec {
pname = "gpxsee";
version = "7.9";
version = "7.11";
src = fetchFromGitHub {
owner = "tumic0";
repo = "GPXSee";
rev = version;
sha256 = "029l5dhc9nnxiw7p0s4gyfkcqw709z7lz96aq8krs75mfk4fv07k";
sha256 = "1b4ky7m990h3rmam9lb1w6vns1mxd8ri6is3a8qgdl8kd6xcl5d7";
};
nativeBuildInputs = [ qmake makeWrapper ];
nativeBuildInputs = [ qmake ];
buildInputs = [ qttools ];
preConfigure = ''
@ -20,12 +20,7 @@ stdenv.mkDerivation rec {
enableParallelBuilding = true;
postInstall = ''
wrapProgram $out/bin/gpxsee \
--prefix XDG_DATA_DIRS ":" $out/share
'';
meta = with stdenv.lib; {
meta = with lib; {
homepage = https://www.gpxsee.org/;
description = "GPS log file viewer and analyzer";
longDescription = ''

View file

@ -9,6 +9,7 @@
IOKit,
Kernel,
OpenGL,
libcanberra,
libicns,
libpng,
librsvg,
@ -20,18 +21,19 @@
with python3Packages;
buildPythonApplication rec {
pname = "kitty";
version = "0.14.2";
version = "0.14.3";
format = "other";
src = fetchFromGitHub {
owner = "kovidgoyal";
repo = "kitty";
rev = "v${version}";
sha256 = "15iv3k7iryf10n8n67d37x24pzcarq97a3dr42lbld00k1lx19az";
sha256 = "0wi6b6b1nyp16rcpcghk6by62wy6qsamv1xdymyn0zbqgd8h9n6b";
};
buildInputs = [
ncurses harfbuzz
harfbuzz
ncurses
] ++ stdenv.lib.optionals stdenv.isDarwin [
Cocoa
CoreGraphics
@ -43,7 +45,7 @@ buildPythonApplication rec {
python3
zlib
] ++ stdenv.lib.optionals stdenv.isLinux [
fontconfig glfw libunistring libX11
fontconfig glfw libunistring libcanberra libX11
libXrandr libXinerama libXcursor libxkbcommon libXi libXext
wayland-protocols wayland dbus
];

View file

@ -1,19 +1,21 @@
--- a/setup.py
+++ b/setup.py
@@ -744,9 +744,15 @@ Categories=System;TerminalEmulator;
if not os.path.exists(logo_dir):
raise SystemExit('The kitty logo has not been generated, you need to run logo/make.py')
subprocess.check_call([
- 'iconutil', '-c', 'icns', logo_dir, '-o',
+ 'png2icns',
os.path.join('Resources', os.path.basename(logo_dir).partition('.')[0] + '.icns')
- ])
+ ] + [os.path.join(logo_dir, logo) for logo in (
+ 'icon_128x128.png',
+ 'icon_16x16.png',
+ 'icon_256x256.png',
+ 'icon_32x32.png',
+ 'icon_512x512.png',
+ )])
# }}}
# }}}
diff -aru a/setup.py b/setup.py
--- a/setup.py 2019-07-29 11:09:32.000000000 -0400
+++ b/setup.py 2019-07-29 11:11:37.000000000 -0400
@@ -784,9 +784,15 @@
def create_macos_app_icon(where='Resources'):
logo_dir = os.path.abspath(os.path.join('logo', appname + '.iconset'))
subprocess.check_call([
- 'iconutil', '-c', 'icns', logo_dir, '-o',
+ 'png2icns',
os.path.join(where, os.path.basename(logo_dir).partition('.')[0] + '.icns')
- ])
+ ] + [os.path.join(logo_dir, logo) for logo in [
+ 'icon_128x128.png',
+ 'icon_16x16.png',
+ 'icon_256x256.png',
+ 'icon_32x32.png',
+ 'icon_512x512.png',
+ ]])
def create_minimal_macos_bundle(args, where):

View file

@ -1,33 +1,22 @@
{ stdenv, fetchFromGitHub, meson, ninja, pkgconfig, scdoc
, systemd, pango, cairo, gdk-pixbuf
, wayland, wayland-protocols
, fetchpatch }:
, wayland, wayland-protocols }:
stdenv.mkDerivation rec {
pname = "mako";
version = "1.3";
version = "1.4";
src = fetchFromGitHub {
owner = "emersion";
repo = pname;
rev = "v${version}";
sha256 = "17azdc37xsbmx13fkfp23vg9lznrv9fh6nhagn64wdq3nhsxm3b6";
sha256 = "11ymiq6cr2ma0iva1mqybn3j6k73bsc6lv6pcbdq7hkhd4f9b7j9";
};
# to be removed with next release
patches = [
(fetchpatch {
url = "https://github.com/emersion/mako/commit/ca8e763f06756136c534b1bbd2e5b536be6b1995.patch";
sha256 = "09mi7nn2vwc69igxxc6y2m36n3snhsz0ady99yabhrzl17k4ryds";
})
];
nativeBuildInputs = [ meson ninja pkgconfig scdoc wayland-protocols ];
buildInputs = [ systemd pango cairo gdk-pixbuf wayland ];
mesonFlags = [
"-Dicons=enabled" "-Dman-pages=enabled" "-Dzsh-completions=true"
];
mesonFlags = [ "-Dzsh-completions=true" ];
meta = with stdenv.lib; {
description = "A lightweight Wayland notification daemon";

View file

@ -26,17 +26,18 @@ assert i3GapsSupport -> ! i3Support && jsoncpp != null && i3-gaps != null;
stdenv.mkDerivation rec {
pname = "polybar";
version = "3.3.1";
version = "3.4.0";
src = fetchFromGitHub {
owner = "jaagr";
repo = pname;
rev = version;
sha256 = "0qwi6q3qkrz2ip1jd4pxlnsrs2a9ywxyf8rgvbzyilr334rsiywh";
sha256 = "1g3zj0788cdlm8inpl19279bw8zjcy7dzj7q4f1l2d8c8g1jhv0m";
fetchSubmodules = true;
};
meta = with stdenv.lib; {
homepage = "https://polybar.github.io/";
description = "A fast and easy-to-use tool for creating status bars";
longDescription = ''
Polybar aims to help users build beautiful and highly customizable
@ -68,8 +69,8 @@ stdenv.mkDerivation rec {
];
postConfigure = ''
substituteInPlace ../include/settings.hpp --replace \
"${stdenv.cc}" "${stdenv.cc.name}"
substituteInPlace generated-sources/settings.hpp \
--replace "${stdenv.cc}" "${stdenv.cc.name}"
'';
postInstall = if (i3Support || i3GapsSupport) then ''

View file

@ -1,15 +1,14 @@
{ stdenv, fetchFromGitHub, makeWrapper, curl, fribidi, rlwrap, gawk, groff, ncurses }:
stdenv.mkDerivation rec {
name = "${pname}-${version}";
pname = "translate-shell";
version = "0.9.6.10";
version = "0.9.6.11";
src = fetchFromGitHub {
owner = "soimort";
repo = "translate-shell";
rev = "v${version}";
sha256 = "1dmh3flldfhnqfay3a6c5hanqcjwrmbly1bq8mlk022qfi1fv33y";
sha256 = "137fz3ahzf65hfqcs4k7hhrmfjlhlw7wr3gfsvk88bnyqkyw44sm";
};
buildInputs = [ makeWrapper ];

View file

@ -3,11 +3,9 @@
, gtk, girara, gettext, libxml2, check
, sqlite, glib, texlive, libintl, libseccomp
, file, librsvg
, gtk-mac-integration, synctexSupport ? true
, gtk-mac-integration
}:
assert synctexSupport -> texlive != null;
with stdenv.lib;
stdenv.mkDerivation rec {
@ -29,7 +27,8 @@ stdenv.mkDerivation rec {
# "-Dseccomp=enabled"
"-Dmanpages=enabled"
"-Dconvert-icon=enabled"
] ++ optional synctexSupport "-Dsynctex=enabled";
"-Dsynctex=enabled"
];
nativeBuildInputs = [
meson ninja pkgconfig desktop-file-utils python3.pkgs.sphinx
@ -38,8 +37,8 @@ stdenv.mkDerivation rec {
buildInputs = [
gtk girara libintl sqlite glib file librsvg
] ++ optional synctexSupport texlive.bin.core
++ optional stdenv.isLinux libseccomp
texlive.bin.core
] ++ optional stdenv.isLinux libseccomp
++ optional stdenv.isDarwin gtk-mac-integration;
doCheck = true;

View file

@ -1,7 +1,6 @@
{ config, pkgs
# zathura_pdf_mupdf fails to load _opj_create_decompress at runtime on Darwin (https://github.com/NixOS/nixpkgs/pull/61295#issue-277982980)
, useMupdf ? config.zathura.useMupdf or (!pkgs.stdenv.isDarwin)
, synctexSupport ? true }:
, useMupdf ? config.zathura.useMupdf or (!pkgs.stdenv.isDarwin) }:
let
callPackage = pkgs.newScope self;
@ -9,9 +8,7 @@ let
self = rec {
gtk = pkgs.gtk3;
zathura_core = callPackage ./core {
inherit synctexSupport;
};
zathura_core = callPackage ./core { };
zathura_pdf_poppler = callPackage ./pdf-poppler { };

View file

@ -2,7 +2,7 @@
buildGoPackage rec {
name = "nomad-${version}";
version = "0.9.3";
version = "0.9.4";
rev = "v${version}";
goPackagePath = "github.com/hashicorp/nomad";
@ -12,7 +12,7 @@ buildGoPackage rec {
owner = "hashicorp";
repo = "nomad";
inherit rev;
sha256 = "0hn9rr5v2y2pw0pmn27gz8dx5n964dsaf48sh0jhwc95b5q1rjwr";
sha256 = "1jgvnmmrz7ffpm6aamdrvklj94n7b43swk9cycqhlfbnzijianpn";
};
# We disable Nvidia GPU scheduling on Linux, as it doesn't work there:

View file

@ -97,8 +97,8 @@ in rec {
terraform_0_11-full = terraform_0_11.full;
terraform_0_12 = pluggable (generic {
version = "0.12.5";
sha256 = "0p064rhaanwx4szs8hv6mdqad8d2bgfd94h2la11j58xbsxc7hap";
version = "0.12.6";
sha256 = "0vxvciv4amblxx50wivlm60fyj1ardfgdpj3l8cj9fhi79b3khxl";
patches = [ ./provider-path.patch ];
passthru = { inherit plugins; };
});

View file

@ -0,0 +1,125 @@
{ stdenv
, lib
, fetchurl
, pkgconfig
, makeWrapper
, file
, geoip
, hyperscan
, jansson
, libcap_ng
, libevent
, libnet
, libnetfilter_log
, libnetfilter_queue
, libnfnetlink
, libpcap
, libyaml
, luajit
, nspr
, nss
, pcre
, python
, zlib
, redisSupport ? true, redis, hiredis
, rustSupport ? true, rustc, cargo
}: let
libmagic = file;
hyperscanSupport = stdenv.system == "x86_64-linux" || stdenv.system == "i686-linux";
in
stdenv.mkDerivation rec {
pname = "suricata";
version = "4.1.4";
src = fetchurl {
url = "https://www.openinfosecfoundation.org/download/${pname}-${version}.tar.gz";
sha256 = "02901wjf90171rhkymcgp0h48hkn3wv8iwrhz4d8ppraz68hv99d";
};
nativeBuildInputs = [
makeWrapper
pkgconfig
];
buildInputs = [
geoip
jansson
libcap_ng
libevent
libmagic
libnet
libnetfilter_log
libnetfilter_queue
libnfnetlink
libpcap
libyaml
luajit
nspr
nss
pcre
python
zlib
]
++ lib.optional hyperscanSupport [ hyperscan ]
++ lib.optional redisSupport [ redis hiredis ]
++ lib.optional rustSupport [ rustc cargo ]
;
enableParallelBuilding = true;
configureFlags = [
"--disable-gccmarch-native"
"--enable-afl"
"--enable-af-packet"
"--enable-gccprotect"
"--enable-geoip"
"--enable-luajit"
"--enable-nflog"
"--enable-nfqueue"
"--enable-pie"
"--disable-prelude"
"--enable-python"
"--enable-unix-socket"
"--localstatedir=/var"
"--sysconfdir=/etc"
"--with-libnet-includes=${libnet}/include"
"--with-libnet-libraries=${libnet}/lib"
]
++ lib.optional hyperscanSupport [
"--with-libhs-includes=${hyperscan}/include"
"--with-libhs-libraries=${hyperscan}/lib"
]
++ lib.optional redisSupport [ "--enable-hiredis" ]
++ lib.optional rustSupport [
"--enable-rust"
"--enable-rust-experimental"
];
installFlags = [
"e_localstatedir=\${TMPDIR}"
"e_logdir=\${TMPDIR}"
"e_logcertsdir=\${TMPDIR}"
"e_logfilesdir=\${TMPDIR}"
"e_rundir=\${TMPDIR}"
"e_sysconfdir=\${out}/etc/suricata"
"e_sysconfrulesdir=\${out}/etc/suricata/rules"
"localstatedir=\${TMPDIR}"
"runstatedir=\${TMPDIR}"
"sysconfdir=\${out}/etc"
];
installTargets = "install install-conf";
postInstall = ''
wrapProgram "$out/bin/suricatasc" \
--prefix PYTHONPATH : $PYTHONPATH:$(toPythonPath "$out")
'';
meta = with stdenv.lib; {
description = "A free and open source, mature, fast and robust network threat detection engine";
homepage = "https://suricata-ids.org";
license = licenses.gpl2;
platforms = platforms.linux;
maintainers = with maintainers; [ magenbluten ];
};
}

View file

@ -1,36 +1,83 @@
{ stdenv, fetchFromGitLab, meson, ninja, gettext, cargo, rustc, python3, rustPlatform, pkgconfig, gtksourceview
, hicolor-icon-theme, glib, libhandy, gtk3, libsecret, dbus, openssl, sqlite, gst_all_1, wrapGAppsHook, fetchpatch }:
{ stdenv
, fetchFromGitLab
, fetchpatch
, meson
, ninja
, gettext
, cargo
, rustc
, python3
, rustPlatform
, pkgconfig
, gtksourceview
, hicolor-icon-theme
, glib
, libhandy
, gtk3
, dbus
, openssl
, sqlite
, gst_all_1
, cairo
, gdk_pixbuf
, gspell
, wrapGAppsHook
}:
rustPlatform.buildRustPackage rec {
version = "4.0.0";
name = "fractal-${version}";
pname = "fractal";
version = "4.2.0";
src = fetchFromGitLab {
domain = "gitlab.gnome.org";
owner = "GNOME";
repo = "fractal";
rev = version;
sha256 = "05q47jdgbi5jz01280msb8gxnbsrgf2jvglfm6k40f1xw4wxkrzy";
sha256 = "0clwsmd6h759bzlazfq5ig56dbx7npx3h43yspk87j1rm2dp1177";
};
cargoSha256 = "1hwjajkphl5439dymglgj3h92hxgbf7xpipzrga7ga8m10nx1dhl";
nativeBuildInputs = [
meson ninja pkgconfig gettext cargo rustc python3 wrapGAppsHook
];
buildInputs = [
glib gtk3 libhandy dbus openssl sqlite gst_all_1.gstreamer gst_all_1.gst-plugins-base gst_all_1.gst-plugins-bad
gtksourceview hicolor-icon-theme libsecret
cargo
gettext
meson
ninja
pkgconfig
python3
rustc
wrapGAppsHook
];
patches = [
# Fixes build with >= gstreamer 1.15.1
buildInputs = [
cairo
dbus
gdk_pixbuf
glib
gspell
gst_all_1.gst-editing-services
gst_all_1.gst-plugins-bad
gst_all_1.gst-plugins-base
gst_all_1.gstreamer
gtk3
gtksourceview
hicolor-icon-theme
libhandy
openssl
sqlite
];
cargoPatches = [
# https://gitlab.gnome.org/GNOME/fractal/merge_requests/446
(fetchpatch {
url = "https://gitlab.gnome.org/GNOME/fractal/commit/e78f36c25c095ea09c9c421187593706ad7c4065.patch";
sha256 = "1qv7ayhkhgrrldag2lzs9ql17nbc1d72j375ljhhf6cms89r19ir";
url = "https://gitlab.gnome.org/GNOME/fractal/commit/2778acdc6c50bc6f034513029b66b0b092bc4c38.patch";
sha256 = "08v17xmbwrjw688ps4hsnd60d5fm26xj72an3zf6yszha2b97j6y";
})
];
postPatch = ''
patchShebangs scripts/meson_post_install.py
chmod +x scripts/test.sh
patchShebangs scripts/meson_post_install.py scripts/test.sh
'';
# Don't use buildRustPackage phases, only use it for rust deps setup
@ -39,13 +86,11 @@ rustPlatform.buildRustPackage rec {
checkPhase = null;
installPhase = null;
cargoSha256 = "1ax5dv200v8mfx0418bx8sbwpbp6zj469xg75hp78kqfiv83pn1g";
meta = with stdenv.lib; {
description = "Matrix group messaging app";
homepage = https://gitlab.gnome.org/GNOME/fractal;
license = licenses.gpl3;
maintainers = with maintainers; [ dtzWill ];
maintainers = with maintainers; [ dtzWill worldofpeace ];
};
}

View file

@ -1,22 +1,24 @@
{ lib, buildPythonApplication, fetchurl, pythonOlder
{ lib, buildPythonApplication, fetchFromGitHub, pythonOlder
, pytest, aiodns, slixmpp, pyinotify, potr, mpd2, cffi, pkgconfig }:
buildPythonApplication rec {
name = "poezio-${version}";
version = "0.12";
pname = "poezio";
version = "0.12.1";
disabled = pythonOlder "3.4";
buildInputs = [ pytest ];
checkInputs = [ pytest ];
propagatedBuildInputs = [ aiodns slixmpp pyinotify potr mpd2 cffi ];
nativeBuildInputs = [ pkgconfig ];
src = fetchurl {
url = "http://dev.louiz.org/attachments/download/129/${name}.tar.gz";
sha256 = "11n9x82xyjwbqk28lsfnvqwn8qc9flv6w2c64camh6j3148ykpvz";
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = "v${version}";
sha256 = "04qnsr0l12i55k6xl4q4akx317gai9wv5f1wpkfkq01wp181i5ll";
};
checkPhase = ''
py.test
pytest
'';
meta = with lib; {

View file

@ -1,4 +1,4 @@
{ stdenv, fetchFromGitHub, fetchNodeModules, nodejs-8_x, ruby, sencha
{ stdenv, fetchFromGitHub, fetchNodeModules, nodejs-10_x, ruby, sencha
, auth0ClientID, auth0Domain }:
stdenv.mkDerivation rec {
@ -12,12 +12,12 @@ stdenv.mkDerivation rec {
sha256 = "1h44srl2gzkhjaazpwz1pwy4dp5x776fc685kahlvjlsfls0fvy9";
};
nativeBuildInputs = [ nodejs-8_x ruby sencha ];
nativeBuildInputs = [ nodejs-10_x ruby sencha ];
node_modules = fetchNodeModules {
inherit src;
nodejs = nodejs-8_x;
nodejs = nodejs-10_x;
sha256 = "0qsgr8cq81yismal5sqr02skakqpynwwzk5s98dr5bg91y361fgy";
};

View file

@ -2,26 +2,19 @@
stdenv.mkDerivation rec {
pname = "rambox-pro";
version = "1.1.2";
version = "1.1.4";
dontBuild = true;
dontStrip = true;
buildInputs = [ nss xorg.libxkbfile ];
buildInputs = [ nss xorg.libXext xorg.libxkbfile xorg.libXScrnSaver ];
nativeBuildInputs = [ autoPatchelfHook makeWrapper nodePackages.asar ];
src = fetchurl {
url = "https://github.com/ramboxapp/download/releases/download/v${version}/RamboxPro-${version}-linux-x64.tar.gz";
sha256 = "0rrfpl371hp278b02b9b6745ax29yrdfmxrmkxv6d158jzlv0dlr";
sha256 = "0vwh3km3h46bgynd10s8ijl3aj5sskzncdj14h3k7h4sibd8r71a";
};
postPatch = ''
substituteInPlace resources/app.asar.unpacked/node_modules/ad-block/vendor/depot_tools/create-chromium-git-src \
--replace "/usr/bin/env -S bash -e" "${stdenv.shell}"
substituteInPlace resources/app.asar.unpacked/node_modules/ad-block/node_modules/bloom-filter-cpp/vendor/depot_tools/create-chromium-git-src \
--replace "/usr/bin/env -S bash -e" "${stdenv.shell}"
'';
installPhase = ''
mkdir -p $out/bin $out/opt/RamboxPro $out/share/applications
asar e resources/app.asar $out/opt/RamboxPro/resources/app.asar.unpacked

View file

@ -6,7 +6,7 @@ at-spi2-atk, libuuid, nodePackages
let
version = "4.0.0";
version = "4.0.1";
rpath = stdenv.lib.makeLibraryPath [
alsaLib
@ -51,7 +51,7 @@ let
if stdenv.hostPlatform.system == "x86_64-linux" then
fetchurl {
url = "https://downloads.slack-edge.com/linux_releases/slack-desktop-${version}-amd64.deb";
sha256 = "911a4c05fb4f85181df13f013e82440b0d171862c9cb137dc19b6381d47bd57e";
sha256 = "1g7c8jka750pblsfzjvfyf7sp1m409kybqagml9miif1v71scxv2";
}
else
throw "Slack is not supported on ${stdenv.hostPlatform.system}";
@ -113,6 +113,7 @@ in stdenv.mkDerivation {
description = "Desktop client for Slack";
homepage = https://slack.com;
license = licenses.unfree;
maintainers = [ maintainers.mmahut ];
platforms = [ "x86_64-linux" ];
};
}

View file

@ -31,16 +31,13 @@ in
stdenv.mkDerivation rec {
name = "teamspeak-client-${version}";
version = "3.1.10";
version = "3.3.0";
src = fetchurl {
urls = [
"http://dl.4players.de/ts/releases/${version}/TeamSpeak3-Client-linux_${arch}-${version}.run"
"http://teamspeak.gameserver.gamed.de/ts3/releases/${version}/TeamSpeak3-Client-linux_${arch}-${version}.run"
];
url = "https://files.teamspeak-services.com/releases/client/${version}/TeamSpeak3-Client-linux_${arch}-${version}.run";
sha256 = if stdenv.is64bit
then "17gylj5pxba14c1c98b5rdyyb87c58z8l8yrd1iw5k293wf7iwv3"
else "1bkn3ykrc73wr02qaqwpr4garlqm3424y3dm2fjx6lqcfzm3ms2k";
then "13286dbjp4qiyfv8my1hfpwzns4szdsnqa11j8ygsh5ikgjk338a"
else "04lwclq7nvw73v5fmn9795j5wi54syglc77ldl41caiqqhdqf1i5";
};
# grab the plugin sdk for the desktop icon
@ -61,6 +58,7 @@ stdenv.mkDerivation rec {
''
mv ts3client_linux_${arch} ts3client
echo "patching ts3client..."
patchelf --replace-needed libquazip.so ${quazip}/lib/libquazip5.so ts3client
patchelf \
--interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
--set-rpath ${stdenv.lib.makeLibraryPath deps}:$(cat $NIX_CC/nix-support/orig-cc)/${libDir} \

View file

@ -1,18 +1,159 @@
{ qt5 }:
{ mkDerivation, lib, fetchFromGitHub, fetchsvn
, pkgconfig, pythonPackages, cmake, wrapGAppsHook, wrapQtAppsHook, gcc8
, qtbase, qtimageformats, gtk3, libappindicator-gtk3, libnotify, xdg_utils
, dee, ffmpeg, openalSoft, minizip, libopus, alsaLib, libpulseaudio, range-v3
}:
let
mkTelegram = args: qt5.callPackage (import ./generic.nix args) { };
stableVersion = {
stable = true;
version = "1.7.14";
sha256Hash = "1bw804a9kffmn23wv0570wihbvfm7jy9cqmxlv196f4j7bw7zkv3";
# svn log svn://svn.archlinux.org/community/telegram-desktop/trunk
archPatchesRevision = "487779";
archPatchesHash = "0f09hvimb66xqksb2v0zc4ryshx7y7z0rafzjd99x37rpib9f3kq";
with lib;
mkDerivation rec {
name = "telegram-desktop-${version}";
version = "1.7.14";
# Telegram-Desktop with submodules
src = fetchFromGitHub {
owner = "telegramdesktop";
repo = "tdesktop";
rev = "v${version}";
sha256 = "1bw804a9kffmn23wv0570wihbvfm7jy9cqmxlv196f4j7bw7zkv3";
fetchSubmodules = true;
};
# Arch patches (svn export telegram-desktop/trunk)
archPatches = fetchsvn {
url = "svn://svn.archlinux.org/community/telegram-desktop/trunk";
# svn log svn://svn.archlinux.org/community/telegram-desktop/trunk
rev = "487779";
sha256 = "0f09hvimb66xqksb2v0zc4ryshx7y7z0rafzjd99x37rpib9f3kq";
};
patches = [
"${archPatches}/tdesktop.patch"
"${archPatches}/no-gtk2.patch"
# "${archPatches}/Use-system-wide-font.patch"
"${archPatches}/tdesktop_lottie_animation_qtdebug.patch"
"${archPatches}/issue6219.patch"
];
postPatch = ''
substituteInPlace Telegram/SourceFiles/platform/linux/linux_libs.cpp \
--replace '"appindicator3"' '"${libappindicator-gtk3}/lib/libappindicator3.so"'
substituteInPlace Telegram/SourceFiles/platform/linux/linux_libnotify.cpp \
--replace '"notify"' '"${libnotify}/lib/libnotify.so"'
'';
nativeBuildInputs = [ pkgconfig pythonPackages.gyp cmake wrapGAppsHook wrapQtAppsHook gcc8 ];
# We want to run wrapProgram manually (with additional parameters)
dontWrapGApps = true;
dontWrapQtApps = true;
buildInputs = [
qtbase qtimageformats gtk3 libappindicator-gtk3
dee ffmpeg openalSoft minizip libopus alsaLib libpulseaudio range-v3
];
enableParallelBuilding = true;
GYP_DEFINES = concatStringsSep "," [
"TDESKTOP_DISABLE_CRASH_REPORTS"
"TDESKTOP_DISABLE_AUTOUPDATE"
"TDESKTOP_DISABLE_REGISTER_CUSTOM_SCHEME"
];
NIX_CFLAGS_COMPILE = [
"-DTDESKTOP_DISABLE_CRASH_REPORTS"
"-DTDESKTOP_DISABLE_AUTOUPDATE"
"-DTDESKTOP_DISABLE_REGISTER_CUSTOM_SCHEME"
"-I${minizip}/include/minizip"
# See Telegram/gyp/qt.gypi
"-I${getDev qtbase}/mkspecs/linux-g++"
] ++ concatMap (x: [
"-I${getDev qtbase}/include/${x}"
"-I${getDev qtbase}/include/${x}/${qtbase.version}"
"-I${getDev qtbase}/include/${x}/${qtbase.version}/${x}"
"-I${getDev libopus}/include/opus"
"-I${getDev alsaLib}/include/alsa"
"-I${getDev libpulseaudio}/include/pulse"
]) [ "QtCore" "QtGui" "QtDBus" ];
CPPFLAGS = NIX_CFLAGS_COMPILE;
preConfigure = ''
patch -R -Np1 -i "${archPatches}/demibold.patch"
pushd "Telegram/ThirdParty/libtgvoip"
patch -Np1 -i "${archPatches}/libtgvoip.patch"
popd
# disable static-qt for rlottie
sed "/RLOTTIE_WITH_STATIC_QT/d" -i "Telegram/gyp/lib_rlottie.gyp"
sed -i Telegram/gyp/telegram_linux.gypi \
-e 's,/usr,/does-not-exist,g' \
-e 's,appindicator-0.1,appindicator3-0.1,g' \
-e 's,-flto,,g'
sed -i Telegram/gyp/qt.gypi \
-e "s,/usr/include/qt/QtCore/,${qtbase.dev}/include/QtCore/,g" \
-e 's,\d+",\d+" | head -n1,g'
sed -i Telegram/gyp/qt_moc.gypi \
-e "s,/usr/bin/moc,moc,g"
sed -i Telegram/gyp/qt_rcc.gypi \
-e "s,/usr/bin/rcc,rcc,g"
# Build system assumes x86, but it works fine on non-x86 if we patch this one flag out
sed -i Telegram/ThirdParty/libtgvoip/libtgvoip.gyp \
-e "/-msse2/d"
gyp \
-Dapi_id=17349 \
-Dapi_hash=344583e45741c457fe1862106095a5eb \
-Dbuild_defines=${GYP_DEFINES} \
-Gconfig=Release \
--depth=Telegram/gyp \
--generator-output=../.. \
-Goutput_dir=out \
--format=cmake \
Telegram/gyp/Telegram.gyp
cd out/Release
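    # Splice the Arch CMakeLists.inj fragment in two lines before the end of the
    # generated CMakeLists.txt (sed's "r" appends the file after line $NUM).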
NUM=$((`wc -l < CMakeLists.txt` - 2))
sed -i "$NUM r $archPatches/CMakeLists.inj" CMakeLists.txt
export ASM=$(type -p gcc)
'';
cmakeFlags = [ "-UTDESKTOP_OFFICIAL_TARGET" ];
installPhase = ''
install -Dm755 Telegram $out/bin/telegram-desktop
mkdir -p $out/share/applications $out/share/kde4/services
install -m444 "$src/lib/xdg/telegramdesktop.desktop" "$out/share/applications/telegram-desktop.desktop"
sed "s,/usr/bin,$out/bin,g" $archPatches/tg.protocol > $out/share/kde4/services/tg.protocol
for icon_size in 16 32 48 64 128 256 512; do
install -Dm644 "../../../Telegram/Resources/art/icon''${icon_size}.png" "$out/share/icons/hicolor/''${icon_size}x''${icon_size}/apps/telegram.png"
done
'';
postFixup = ''
# This is necessary to run Telegram in a pure environment.
# We also use gappsWrapperArgs from wrapGAppsHook.
wrapProgram $out/bin/telegram-desktop \
"''${gappsWrapperArgs[@]}" \
"''${qtWrapperArgs[@]}" \
--prefix PATH : ${xdg_utils}/bin \
--set XDG_RUNTIME_DIR "XDG-RUNTIME-DIR"
sed -i $out/bin/telegram-desktop \
-e "s,'XDG-RUNTIME-DIR',\"\''${XDG_RUNTIME_DIR:-/run/user/\$(id --user)}\","
'';
meta = {
description = "Telegram Desktop messaging app";
license = licenses.gpl3;
platforms = platforms.linux;
homepage = https://desktop.telegram.org/;
maintainers = with maintainers; [ primeos abbradar ];
};
in {
stable = mkTelegram stableVersion;
preview = mkTelegram (stableVersion // {
stable = false;
});
}

View file

@ -1,161 +0,0 @@
{ stable, version, sha256Hash, archPatchesRevision, archPatchesHash }:
{ mkDerivation, lib, fetchFromGitHub, fetchsvn
, pkgconfig, pythonPackages, cmake, wrapGAppsHook, wrapQtAppsHook, gcc8
, qtbase, qtimageformats, gtk3, libappindicator-gtk3, libnotify, xdg_utils
, dee, ffmpeg, openalSoft, minizip, libopus, alsaLib, libpulseaudio, range-v3
}:
with lib;
mkDerivation rec {
name = "telegram-desktop-${version}";
inherit version;
# Telegram-Desktop with submodules
src = fetchFromGitHub {
owner = "telegramdesktop";
repo = "tdesktop";
rev = "v${version}";
sha256 = sha256Hash;
fetchSubmodules = true;
};
# Arch patches (svn export telegram-desktop/trunk)
archPatches = fetchsvn {
url = "svn://svn.archlinux.org/community/telegram-desktop/trunk";
rev = archPatchesRevision;
sha256 = archPatchesHash;
};
patches = [
"${archPatches}/tdesktop.patch"
"${archPatches}/no-gtk2.patch"
# "${archPatches}/Use-system-wide-font.patch"
"${archPatches}/tdesktop_lottie_animation_qtdebug.patch"
"${archPatches}/issue6219.patch"
];
postPatch = ''
substituteInPlace Telegram/SourceFiles/platform/linux/linux_libs.cpp \
--replace '"appindicator3"' '"${libappindicator-gtk3}/lib/libappindicator3.so"'
substituteInPlace Telegram/SourceFiles/platform/linux/linux_libnotify.cpp \
--replace '"notify"' '"${libnotify}/lib/libnotify.so"'
'';
nativeBuildInputs = [ pkgconfig pythonPackages.gyp cmake wrapGAppsHook wrapQtAppsHook gcc8 ];
# We want to run wrapProgram manually (with additional parameters)
dontWrapGApps = true;
buildInputs = [
qtbase qtimageformats gtk3 libappindicator-gtk3
dee ffmpeg openalSoft minizip libopus alsaLib libpulseaudio range-v3
];
enableParallelBuilding = true;
GYP_DEFINES = concatStringsSep "," [
"TDESKTOP_DISABLE_CRASH_REPORTS"
"TDESKTOP_DISABLE_AUTOUPDATE"
"TDESKTOP_DISABLE_REGISTER_CUSTOM_SCHEME"
];
NIX_CFLAGS_COMPILE = [
"-DTDESKTOP_DISABLE_CRASH_REPORTS"
"-DTDESKTOP_DISABLE_AUTOUPDATE"
"-DTDESKTOP_DISABLE_REGISTER_CUSTOM_SCHEME"
"-I${minizip}/include/minizip"
# See Telegram/gyp/qt.gypi
"-I${getDev qtbase}/mkspecs/linux-g++"
] ++ concatMap (x: [
"-I${getDev qtbase}/include/${x}"
"-I${getDev qtbase}/include/${x}/${qtbase.version}"
"-I${getDev qtbase}/include/${x}/${qtbase.version}/${x}"
"-I${getDev libopus}/include/opus"
"-I${getDev alsaLib}/include/alsa"
"-I${getDev libpulseaudio}/include/pulse"
]) [ "QtCore" "QtGui" "QtDBus" ];
CPPFLAGS = NIX_CFLAGS_COMPILE;
preConfigure = ''
patch -R -Np1 -i "${archPatches}/demibold.patch"
pushd "Telegram/ThirdParty/libtgvoip"
patch -Np1 -i "${archPatches}/libtgvoip.patch"
popd
# disable static-qt for rlottie
sed "/RLOTTIE_WITH_STATIC_QT/d" -i "Telegram/gyp/lib_rlottie.gyp"
sed -i Telegram/gyp/telegram_linux.gypi \
-e 's,/usr,/does-not-exist,g' \
-e 's,appindicator-0.1,appindicator3-0.1,g' \
-e 's,-flto,,g'
sed -i Telegram/gyp/qt.gypi \
-e "s,/usr/include/qt/QtCore/,${qtbase.dev}/include/QtCore/,g" \
-e 's,\d+",\d+" | head -n1,g'
sed -i Telegram/gyp/qt_moc.gypi \
-e "s,/usr/bin/moc,moc,g"
sed -i Telegram/gyp/qt_rcc.gypi \
-e "s,/usr/bin/rcc,rcc,g"
# Build system assumes x86, but it works fine on non-x86 if we patch this one flag out
sed -i Telegram/ThirdParty/libtgvoip/libtgvoip.gyp \
-e "/-msse2/d"
gyp \
-Dapi_id=17349 \
-Dapi_hash=344583e45741c457fe1862106095a5eb \
-Dbuild_defines=${GYP_DEFINES} \
-Gconfig=Release \
--depth=Telegram/gyp \
--generator-output=../.. \
-Goutput_dir=out \
--format=cmake \
Telegram/gyp/Telegram.gyp
cd out/Release
NUM=$((`wc -l < CMakeLists.txt` - 2))
sed -i "$NUM r $archPatches/CMakeLists.inj" CMakeLists.txt
export ASM=$(type -p gcc)
'';
cmakeFlags = [ "-UTDESKTOP_OFFICIAL_TARGET" ];
installPhase = ''
install -Dm755 Telegram $out/bin/telegram-desktop
mkdir -p $out/share/applications $out/share/kde4/services
install -m444 "$src/lib/xdg/telegramdesktop.desktop" "$out/share/applications/telegram-desktop.desktop"
sed "s,/usr/bin,$out/bin,g" $archPatches/tg.protocol > $out/share/kde4/services/tg.protocol
for icon_size in 16 32 48 64 128 256 512; do
install -Dm644 "../../../Telegram/Resources/art/icon''${icon_size}.png" "$out/share/icons/hicolor/''${icon_size}x''${icon_size}/apps/telegram.png"
done
'';
dontWrapQtApps = true;
postFixup = ''
# This is necessary to run Telegram in a pure environment.
# We also use gappsWrapperArgs from wrapGAppsHook.
wrapProgram $out/bin/telegram-desktop \
"''${gappsWrapperArgs[@]}" \
"''${qtWrapperArgs[@]}" \
--prefix PATH : ${xdg_utils}/bin \
--set XDG_RUNTIME_DIR "XDG-RUNTIME-DIR"
sed -i $out/bin/telegram-desktop \
-e "s,'XDG-RUNTIME-DIR',\"\''${XDG_RUNTIME_DIR:-/run/user/\$(id --user)}\","
'';
meta = {
description = "Telegram Desktop messaging app "
+ (if stable then "(stable version)" else "(pre-release)");
license = licenses.gpl3;
platforms = platforms.linux;
homepage = https://desktop.telegram.org/;
maintainers = with maintainers; [ primeos abbradar ];
};
}

View file

@@ -1,103 +1,132 @@
{ stdenv, fetchurl, dpkg, makeDesktopItem, libuuid, gtk3, atk, cairo, pango
, gdk-pixbuf, glib, freetype, fontconfig, dbus, libnotify, libX11, xorg, libXi
, libXcursor, libXdamage, libXrandr, libXcomposite, libXext, libXfixes
, libXrender, libXtst, libXScrnSaver, nss, nspr, alsaLib, cups, expat, udev
, xdg_utils, hunspell, pulseaudio, pciutils, at-spi2-atk
{ stdenv, fetchurl, makeDesktopItem
, alsaLib, at-spi2-atk, atk, cairo, cups, dbus, dpkg, expat, fontconfig
, freetype, gdk_pixbuf, glib, gtk3, hunspell, libX11, libXScrnSaver
, libXcomposite, libXcursor, libXdamage, libXext, libXfixes, libXi, libXrandr
, libXrender, libXtst, libnotify, libuuid, nspr, nss, pango, pciutils
, pulseaudio, udev, xdg_utils, xorg
, cpio, xar
}:
let
rpath = stdenv.lib.makeLibraryPath [
alsaLib
atk
cairo
cups
dbus
expat
fontconfig
freetype
gdk-pixbuf
glib
gtk3
at-spi2-atk
hunspell
libuuid
libnotify
libX11
libXcomposite
libXcursor
libXdamage
libXext
libXfixes
libXi
libXrandr
libXrender
libXScrnSaver
libXtst
nspr
nss
pango
pciutils
pulseaudio
stdenv.cc.cc
udev
xdg_utils
xorg.libxcb
];
inherit (stdenv.hostPlatform) system;
in
stdenv.mkDerivation rec {
pname = "wire-desktop";
version = "3.9.2895";
src = fetchurl {
url = "https://wire-app.wire.com/linux/debian/pool/main/Wire-${version}_amd64.deb";
sha256 = "0wrn95m64j4b7ym44h9zawq13kg4m12aixlyyzp56bfyczmjq4a5";
};
version = {
"x86_64-linux" = "3.9.2895";
"x86_64-darwin" = "3.9.2943";
}.${system} or "";
desktopItem = makeDesktopItem {
name = "wire-desktop";
exec = "wire-desktop %U";
icon = "wire-desktop";
comment = "Secure messenger for everyone";
desktopName = "Wire Desktop";
genericName = "Secure messenger";
categories = "Network;InstantMessaging;Chat;VideoConference";
};
dontBuild = true;
dontPatchELF = true;
dontConfigure = true;
nativeBuildInputs = [ dpkg ];
unpackPhase = "dpkg-deb -x $src .";
installPhase = ''
mkdir -p "$out"
cp -R "opt" "$out"
cp -R "usr/share" "$out/share"
chmod -R g-w "$out"
# Patch wire-desktop
patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
--set-rpath "${rpath}:$out/opt/Wire" \
"$out/opt/Wire/wire-desktop"
# Symlink to bin
mkdir -p "$out/bin"
ln -s "$out/opt/Wire/wire-desktop" "$out/bin/wire-desktop"
# Desktop file
mkdir -p "$out/share/applications"
cp "${desktopItem}/share/applications/"* "$out/share/applications"
'';
sha256 = {
"x86_64-linux" = "0wrn95m64j4b7ym44h9zawq13kg4m12aixlyyzp56bfyczmjq4a5";
"x86_64-darwin" = "1y1bzsjmjrj518q29xfx6gg1nhdbaz7y5hzaqrp241az6plp090k";
}.${system} or "";
meta = with stdenv.lib; {
description = "A modern, secure messenger";
description = "A modern, secure messenger for everyone";
longDescription = ''
Wire Personal is a secure, privacy-friendly messenger. It combines useful
and fun features, audited security, and a beautiful, distinct user
interface. It does not require a phone number to register and chat.
* End-to-end encrypted chats, calls, and files
* Crystal clear voice and video calling
* File and screen sharing
* Timed messages and chats
* Synced across your phone, desktop and tablet
'';
homepage = https://wire.com/;
license = licenses.gpl3;
maintainers = with maintainers; [ worldofpeace ];
platforms = [ "x86_64-linux" ];
downloadPage = https://wire.com/download/;
license = licenses.gpl3Plus;
maintainers = with maintainers; [ toonn worldofpeace ];
platforms = [ "x86_64-darwin" "x86_64-linux" ];
};
}
linux = stdenv.mkDerivation rec {
inherit pname version meta;
src = fetchurl {
url = "https://wire-app.wire.com/linux/debian/pool/main/"
+ "Wire-${version}_amd64.deb";
inherit sha256;
};
desktopItem = makeDesktopItem {
name = "wire-desktop";
exec = "wire-desktop %U";
icon = "wire-desktop";
comment = "Secure messenger for everyone";
desktopName = "Wire Desktop";
genericName = "Secure messenger";
categories = "Network;InstantMessaging;Chat;VideoConference";
};
dontBuild = true;
dontPatchELF = true;
dontConfigure = true;
nativeBuildInputs = [ dpkg ];
rpath = stdenv.lib.makeLibraryPath [
alsaLib at-spi2-atk atk cairo cups dbus expat fontconfig freetype
gdk_pixbuf glib gtk3 hunspell libX11 libXScrnSaver libXcomposite
libXcursor libXdamage libXext libXfixes libXi libXrandr libXrender
libXtst libnotify libuuid nspr nss pango pciutils pulseaudio
stdenv.cc.cc udev xdg_utils xorg.libxcb
];
unpackPhase = "dpkg-deb -x $src .";
installPhase = ''
mkdir -p "$out"
cp -R "opt" "$out"
cp -R "usr/share" "$out/share"
chmod -R g-w "$out"
# Patch wire-desktop
patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
--set-rpath "${rpath}:$out/opt/Wire" \
"$out/opt/Wire/wire-desktop"
# Symlink to bin
mkdir -p "$out/bin"
ln -s "$out/opt/Wire/wire-desktop" "$out/bin/wire-desktop"
# Desktop file
mkdir -p "$out/share/applications"
cp "${desktopItem}/share/applications/"* "$out/share/applications"
'';
};
darwin = stdenv.mkDerivation rec {
inherit pname version meta;
src = fetchurl {
url = "https://github.com/wireapp/wire-desktop/releases/download/"
+ "macos%2F${version}/Wire.pkg";
inherit sha256;
};
buildInputs = [ cpio xar ];
unpackPhase = ''
xar -xf $src
cd com.wearezeta.zclient.mac.pkg
'';
buildPhase = ''
cat Payload | gunzip -dc | cpio -i
'';
installPhase = ''
mkdir -p $out/Applications
cp -r Wire.app $out/Applications
'';
};
in if stdenv.isDarwin
then darwin
else linux

View file

@@ -1,4 +1,5 @@
{ stdenv, fetchurl, makeWrapper, makeDesktopItem, autoPatchelfHook, env
{ stdenv, fetchurl, mkDerivation, autoPatchelfHook
, fetchFromGitHub
# Dynamic libraries
, dbus, glib, libGL, libX11, libXfixes, libuuid, libxcb, qtbase, qtdeclarative
, qtimageformats, qtlocation, qtquickcontrols, qtquickcontrols2, qtscript, qtsvg
@@ -21,23 +22,26 @@ let
};
};
qtDeps = [
qtbase qtdeclarative qtlocation qtquickcontrols qtquickcontrols2 qtscript
qtwebchannel qtwebengine qtimageformats qtsvg qttools qtwayland
];
# Used for icons, appdata, and desktop file.
desktopIntegration = fetchFromGitHub {
owner = "flathub";
repo = "us.zoom.Zoom";
rev = "0d294e1fdd2a4ef4e05d414bc680511f24d835d7";
sha256 = "0rm188844a10v8d6zgl2pnwsliwknawj09b02iabrvjw5w1lp6wl";
};
qtEnv = env "zoom-us-qt-${qtbase.version}" qtDeps;
in stdenv.mkDerivation {
in mkDerivation {
name = "zoom-us-${version}";
src = srcs.${stdenv.hostPlatform.system};
nativeBuildInputs = [ autoPatchelfHook makeWrapper ];
nativeBuildInputs = [ autoPatchelfHook ];
buildInputs = [
dbus glib libGL libX11 libXfixes libuuid libxcb qtEnv libjpeg_turbo
] ++ qtDeps;
dbus glib libGL libX11 libXfixes libuuid libxcb libjpeg_turbo
qtbase qtdeclarative qtlocation qtquickcontrols qtquickcontrols2 qtscript
qtwebchannel qtwebengine qtimageformats qtsvg qttools qtwayland
];
runtimeDependencies = optional pulseaudioSupport libpulseaudio;
@@ -60,33 +64,43 @@ in stdenv.mkDerivation {
in ''
runHook preInstall
packagePath=$out/share/zoom-us
mkdir -p $packagePath $out/bin
mkdir -p $out/{bin,share/zoom-us}
cp -ar ${files} $packagePath
cp -ar ${files} $out/share/zoom-us
# TODO Patch this somehow; tries to dlopen './libturbojpeg.so' from cwd
ln -s $(readlink -e "${libjpeg_turbo.out}/lib/libturbojpeg.so") $packagePath/libturbojpeg.so
ln -s ${qtEnv}/bin/qt.conf $packagePath
makeWrapper $packagePath/zoom $out/bin/zoom-us \
--prefix PATH : "${makeBinPath [ coreutils glib.dev pciutils procps qttools.dev utillinux ]}" \
--prefix LD_PRELOAD : "${libv4l}/lib/libv4l/v4l2convert.so" \
--run "cd $packagePath"
ln -s $(readlink -e "${libjpeg_turbo.out}/lib/libturbojpeg.so") $out/share/zoom-us/libturbojpeg.so
runHook postInstall
'';
postInstall = (makeDesktopItem {
name = "zoom-us";
exec = "$out/bin/zoom-us %U";
icon = "$out/share/zoom-us/application-x-zoom.png";
desktopName = "Zoom";
genericName = "Video Conference";
categories = "Network;Application;";
mimeType = "x-scheme-handler/zoommtg;";
}).buildCommand;
postInstall = ''
mkdir -p $out/share/{applications,appdata,icons}
# Desktop File
cp ${desktopIntegration}/us.zoom.Zoom.desktop $out/share/applications
substituteInPlace $out/share/applications/us.zoom.Zoom.desktop \
--replace "Exec=zoom" "Exec=$out/bin/zoom-us"
# Appdata
cp ${desktopIntegration}/us.zoom.Zoom.appdata.xml $out/share/appdata
# Icons
for icon_size in 64 96 128 256; do
path=$icon_size'x'$icon_size
icon=${desktopIntegration}/us.zoom.Zoom.$icon_size.png
mkdir -p $out/share/icons/hicolor/$path/apps
cp $icon $out/share/icons/hicolor/$path/apps/us.zoom.Zoom.png
done
ln -s $out/share/zoom-us/zoom $out/bin/zoom-us
'';
qtWrapperArgs = [
''--prefix PATH : ${makeBinPath [ coreutils glib.dev pciutils procps qttools.dev utillinux ]}''
''--prefix LD_PRELOAD : ${libv4l}/lib/libv4l/v4l2convert.so''
];
passthru.updateScript = ./update.sh;

View file

@@ -1,15 +1,15 @@
{ stdenv, buildGoModule, fetchurl
, go, scdoc
, go, ncurses, scdoc
, python3, perl, w3m, dante
}:
buildGoModule rec {
pname = "aerc";
version = "0.1.4";
version = "0.2.1";
src = fetchurl {
url = "https://git.sr.ht/~sircmpwn/aerc/archive/${version}.tar.gz";
sha256 = "0vlqgcjbq6yp7ffrfs3zwa9hrm4vyx9245v9pkqdn328xlff3h55";
sha256 = "1ky1nl5b54lf5jnac2kb5404fplwnwypjplas8imdlsf517fw32n";
};
nativeBuildInputs = [
@@ -38,12 +38,13 @@ buildGoModule rec {
'';
postFixup = ''
wrapProgram $out/bin/aerc --prefix PATH ":" "$out/share/aerc/filters"
wrapProgram $out/bin/aerc --prefix PATH ":" \
"$out/share/aerc/filters:${stdenv.lib.makeBinPath [ ncurses.dev ]}"
wrapProgram $out/share/aerc/filters/html --prefix PATH ":" \
${stdenv.lib.makeBinPath [ w3m dante ]}
'';
modSha256 = "0v1b76nax5295bjrq19wdzm2ixiszlk7j1v1k9sjz4la07h5bvfj";
modSha256 = "0fc9m1qb8innypc8cxzbqyrfkawawyaqq3gqy7lqwmyh32f300jh";
meta = with stdenv.lib; {
description = "aerc is an email client for your terminal";

View file

@@ -14,7 +14,7 @@ assert iceSupport -> zeroc_ice != null;
with stdenv.lib;
let
generic = overrides: source: stdenv.mkDerivation (source // overrides // {
generic = overrides: source: (if source.qtVersion == 5 then qt5.mkDerivation else stdenv.mkDerivation) (source // overrides // {
name = "${overrides.type}-${source.version}";
patches = (source.patches or []) ++ optional jackSupport ./mumble-jack-support.patch;
@@ -26,7 +26,7 @@
# protobuf is frozen to 3.6 because of this bug: https://github.com/mumble-voip/mumble/issues/3617
# this can be reverted to the latest version in a future release of mumble, as it is already fixed in master
buildInputs = [ boost protobuf3_6 avahi ]
++ { qt4 = [ qt4 ]; qt5 = [ qt5.qtbase ]; }."qt${toString source.qtVersion}"
++ optional (source.qtVersion == 4) qt4
++ (overrides.buildInputs or [ ]);
qmakeFlags = [
@@ -45,20 +45,23 @@ let
++ (overrides.configureFlags or [ ]);
preConfigure = ''
qmakeFlags="$qmakeFlags DEFINES+=PLUGIN_PATH=$out/lib"
qmakeFlags="$qmakeFlags DEFINES+=PLUGIN_PATH=$out/lib/mumble"
patchShebangs scripts
'';
makeFlags = [ "release" ];
installPhase = ''
mkdir -p $out/{lib,bin}
find release -type f -not -name \*.\* -exec cp {} $out/bin \;
find release -type f -name \*.\* -exec cp {} $out/lib \;
runHook preInstall
${overrides.installPhase}
# doc stuff
mkdir -p $out/share/man/man1
cp man/mum* $out/share/man/man1
'' + (overrides.installPhase or "");
install -Dm644 man/mum* $out/share/man/man1/
runHook postInstall
'';
enableParallelBuilding = true;
@@ -74,7 +77,7 @@ let
client = source: generic {
type = "mumble";
nativeBuildInputs = optionals (source.qtVersion == 5) [ qt5.qttools ];
nativeBuildInputs = optional (source.qtVersion == 5) qt5.qttools;
buildInputs = [ libopus libsndfile speex ]
++ optional (source.qtVersion == 5) qt5.qtsvg
++ optional stdenv.isLinux alsaLib
@@ -89,12 +92,19 @@ let
NIX_CFLAGS_COMPILE = optional speechdSupport "-I${speechd}/include/speech-dispatcher";
installPhase = ''
mkdir -p $out/share/applications
cp scripts/mumble.desktop $out/share/applications
# bin stuff
install -Dm755 release/mumble $out/bin/mumble
install -Dm755 scripts/mumble-overlay $out/bin/mumble-overlay
mkdir -p $out/share/icons{,/hicolor/scalable/apps}
cp icons/mumble.svg $out/share/icons
ln -s $out/share/icons/mumble.svg $out/share/icons/hicolor/scalable/apps
# lib stuff
mkdir -p $out/lib/mumble
cp -P release/libmumble.so* $out/lib
cp -P release/libcelt* $out/lib/mumble
cp -P release/plugins/* $out/lib/mumble
# icons
install -Dm644 scripts/mumble.desktop $out/share/applications/mumble.desktop
install -Dm644 icons/mumble.svg $out/share/icons/hicolor/scalable/apps/mumble.svg
'';
} source;
@@ -110,6 +120,11 @@ let
];
buildInputs = [ libcap ] ++ optional iceSupport zeroc_ice;
installPhase = ''
# bin stuff
install -Dm755 release/murmurd $out/bin/murmurd
'';
};
stableSource = rec {
@@ -138,26 +153,24 @@ let
];
};
gitSource = rec {
version = "2019-07-10";
rcSource = rec {
version = "1.3.0-rc2";
qtVersion = 5;
# Needs submodules
src = fetchFromGitHub {
owner = "mumble-voip";
repo = "mumble";
rev = "41b265584654c7ac216fd3ccb9c141734d3f839b";
rev = version;
sha256 = "00irlzz5q4drmsfbwrkyy7p7w8a5fc1ip5vyicq3g3cy58dprpqr";
fetchSubmodules = true;
};
};
in {
mumble = client stableSource;
mumble_git = client gitSource;
mumble_rc = client rcSource;
murmur = server stableSource;
murmur_git = (server gitSource).overrideAttrs (old: {
murmur_rc = (server rcSource).overrideAttrs (old: {
meta = old.meta // { broken = iceSupport; };
nativeBuildInputs = old.nativeBuildInputs or [] ++ [ qt5.wrapQtAppsHook ];
});
}
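
A brief usage note: the release-candidate variants defined just above (mumble_rc, murmur_rc) can be swapped in for the stable attributes. The overlay below is only an illustrative sketch and assumes those attributes are exposed at the top level under the same names.

# Hypothetical overlay; mumble_rc and murmur_rc come from the attribute set above.
self: super: {
  mumble = super.mumble_rc;
  murmur = super.murmur_rc;
}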

View file

@@ -0,0 +1,28 @@
{ stdenv, fetchFromGitHub, cmake, pkgconfig
, mpg123, SDL2, gnome3, faad2, pcre
} :
stdenv.mkDerivation rec {
pname = "dablin";
version = "1.11.0";
src = fetchFromGitHub {
owner = "Opendigitalradio";
repo = "dablin";
rev = "${version}";
sha256 = "04ir7yg7psnnb48s1qfppvvx6lak4s8f6fqdg721y2kd9129jm82";
};
nativeBuildInputs = [ cmake pkgconfig ];
buildInputs = [ faad2 mpg123 SDL2 gnome3.gtkmm pcre ];
meta = with stdenv.lib; {
description = "Play DAB/DAB+ from ETI-NI aligned stream";
homepage = https://github.com/Opendigitalradio/dablin;
license = with licenses; [ gpl3 lgpl21 ];
platforms = platforms.linux;
maintainers = [ maintainers.markuskowa ];
};
}

View file

@@ -0,0 +1,27 @@
{ stdenv, fetchFromGitHub, cmake, pkgconfig
, libusb1, rtl-sdr, fftw
} :
stdenv.mkDerivation rec {
pname = "dabtools";
version = "20180405";
src = fetchFromGitHub {
owner = "Opendigitalradio";
repo = "dabtools";
rev = "8b0b2258b02020d314efd4d0d33a56c8097de0d1";
sha256 = "18nkdybgg2w6zh56g6xwmg49sifalvraz4rynw8w5d8cqi3dm9sm";
};
nativeBuildInputs = [ cmake pkgconfig ];
buildInputs = [ rtl-sdr fftw libusb1 ];
meta = with stdenv.lib; {
description = "Commandline tools for DAB and DAB+ digital radio broadcasts";
homepage = "https://github.com/Opendigitalradio/dabtools";
license = licenses.gpl3Plus;
platforms = platforms.linux;
maintainers = [ maintainers.markuskowa ];
};
}

View file

@@ -2,16 +2,17 @@
stdenv.mkDerivation rec {
name = "strelka-${version}";
version = "2.9.5";
version = "2.9.10";
src = fetchFromGitHub {
owner = "Illumina";
repo = "strelka";
rev = "v${version}";
sha256 = "0x4a6nkx1jnyag9svghsdjz1fz6q7qx5pn77wphdfnk81f9yspf8";
sha256 = "1nykbmim1124xh22nrhrsn8xgjb3s2y7akrdapn9sl1gdych4ppf";
};
buildInputs = [ cmake zlib python2 ];
nativeBuildInputs = [ cmake ];
buildInputs = [ zlib python2 ];
preConfigure = ''
sed -i 's|/usr/bin/env python|${python2}/bin/python|' src/python/lib/makeRunScript.py

View file

@@ -1,4 +1,4 @@
{ stdenv, fetchFromGitHub, pythonPackages, gettext, git }:
{ stdenv, fetchFromGitHub, pythonPackages, gettext, git, qt5 }:
let
inherit (pythonPackages) buildPythonApplication pyqt5 sip pyinotify;
@@ -16,9 +16,16 @@ in buildPythonApplication rec {
buildInputs = [ git gettext ];
propagatedBuildInputs = [ pyqt5 sip pyinotify ];
nativeBuildInputs = [ qt5.wrapQtAppsHook ];
doCheck = false;
postFixup = ''
wrapQtApp bin/git-cola
wrapQtApp bin/git-dag
'';
meta = with stdenv.lib; {
homepage = https://github.com/git-cola/git-cola;
description = "A sleek and powerful Git GUI";

View file

@@ -8,13 +8,13 @@ with stdenv.lib;
buildGoPackage rec {
pname = "gitea";
version = "1.8.3";
version = "1.9.0";
src = fetchFromGitHub {
owner = "go-gitea";
repo = "gitea";
rev = "v${version}";
sha256 = "1q3wslf9s4dg7h1f41rh9rb7qlbsqz8k3xffmlzdbbgfdrm7sym1";
sha256 = "1z7rkhxkymv7rgc7blh9ps5sqrgl4sryf0rqcp16nh9n5snfm1rm";
# Required to generate the same checksum on MacOS due to unicode encoding differences
# More information: https://github.com/NixOS/nixpkgs/pull/48128
extraPostFetch = ''

View file

@@ -285,7 +285,7 @@
type = "git";
url = "https://github.com/libgit2/git2go";
rev = "ecaeb7a21d47";
sha256 = "1sh30jnzjag7ddhr4if65j8vpcpj4rw93sf1g033jf91flrzyx23";
sha256 = "14r7ryff93r49g94f6kg66xc0y6rwb31lj22s3qmzmlgywk0pgvr";
};
}
{
@@ -294,7 +294,7 @@
type = "git";
url = "https://github.com/lightstep/lightstep-tracer-go";
rev = "v0.15.6";
sha256 = "0g5bh3xdrsz30npk79h5ia340xyw97424xfrfzv3acqw3qg2sqn8";
sha256 = "10n5r66g44s6rnz5kf86s4a3p1g55kc1kxqhnk7bx7mlayndgpmb";
};
}
{
@@ -483,7 +483,7 @@
type = "git";
url = "https://github.com/uber/jaeger-client-go";
rev = "v2.15.0";
sha256 = "1qvqkf20dp5fyfg7qd3jc29q1yv0qjz2mkxa02j1v3n8ka134rff";
sha256 = "0ki23m9zrf3vxp839fnp9ckr4m28y6mpad8g5s5lr5k8jkl0sfwj";
};
}
{

View file

@@ -56,5 +56,8 @@ stdenv.mkDerivation rec {
license = stdenv.lib.licenses.gpl2;
maintainers = with stdenv.lib.maintainers; [ flosse ];
platforms = stdenv.lib.platforms.unix;
# TODO: The software is deprecated and the build is broken, see:
# https://github.com/NixOS/nixpkgs/pull/63260#issuecomment-503506487
broken = true;
};
}

View file

@@ -1,40 +0,0 @@
source $stdenv/setup
set -x
lib=" \
makemkv-oss-${ver}/out/libdriveio.so.0 \
makemkv-oss-${ver}/out/libmakemkv.so.1 \
makemkv-oss-${ver}/out/libmmbd.so.0 \
"
bin=" \
makemkv-oss-${ver}/out/makemkv \
makemkv-bin-${ver}/bin/amd64/makemkvcon \
"
tar xzf ${src_bin}
tar xzf ${src_oss}
(
cd makemkv-oss-${ver}
./configure --prefix=$out
make
)
chmod +x ${bin}
libPath="${libPath}:${out}/lib" # XXX: der. This should be in the nix file?
for i in ${bin} ; do
patchelf \
--interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
--set-rpath $libPath \
${i}
done
mkdir -p $out/bin
mkdir -p $out/lib
mkdir -p $out/share/MakeMKV
cp ${lib} ${out}/lib
cp ${bin} ${out}/bin
cp makemkv-bin-${ver}/src/share/* $out/share/MakeMKV

View file

@@ -1,34 +1,45 @@
{ stdenv, fetchurl
, openssl, qt5, libGLU_combined, zlib, pkgconfig, libav
{ stdenv, mkDerivation, fetchurl, autoPatchelfHook
, ffmpeg, openssl, qtbase, zlib, pkgconfig
}:
stdenv.mkDerivation rec {
name = "makemkv-${ver}";
ver = "1.14.4";
builder = ./builder.sh;
let
version = "1.14.4";
# Using two URLs because the first one breaks as soon as a new version is released
src_bin = fetchurl {
urls = [
"http://www.makemkv.com/download/makemkv-bin-${ver}.tar.gz"
"http://www.makemkv.com/download/old/makemkv-bin-${ver}.tar.gz"
"http://www.makemkv.com/download/makemkv-bin-${version}.tar.gz"
"http://www.makemkv.com/download/old/makemkv-bin-${version}.tar.gz"
];
sha256 = "0vmmvldmwmq9g202abblj6l15kb8z3b0c6mcc03f30s2yci6ij33";
};
src_oss = fetchurl {
urls = [
"http://www.makemkv.com/download/makemkv-oss-${ver}.tar.gz"
"http://www.makemkv.com/download/old/makemkv-oss-${ver}.tar.gz"
"http://www.makemkv.com/download/makemkv-oss-${version}.tar.gz"
"http://www.makemkv.com/download/old/makemkv-oss-${version}.tar.gz"
];
sha256 = "0n1nlq17dxcbgk9xqf7nv6zykvh91yhsjqdhq55947wc11fxjqa0";
};
in mkDerivation {
pname = "makemkv";
inherit version;
nativeBuildInputs = [ pkgconfig ];
buildInputs = [openssl qt5.qtbase libGLU_combined zlib libav];
srcs = [ src_bin src_oss ];
libPath = stdenv.lib.makeLibraryPath [stdenv.cc.cc openssl libGLU_combined qt5.qtbase zlib ]
+ ":" + stdenv.cc.cc + "/lib64";
sourceRoot = "makemkv-oss-${version}";
nativeBuildInputs = [ autoPatchelfHook pkgconfig ];
buildInputs = [ ffmpeg openssl qtbase zlib ];
installPhase = ''
runHook preInstall
install -Dm555 -t $out/bin out/makemkv ../makemkv-bin-${version}/bin/amd64/makemkvcon
install -D -t $out/lib out/lib{driveio,makemkv,mmbd}.so.*
install -D -t $out/share/MakeMKV ../makemkv-bin-${version}/src/share/*
runHook postInstall
'';
meta = with stdenv.lib; {
description = "Convert blu-ray and dvd to mkv";

View file

@@ -1,4 +1,5 @@
{ config, stdenv
, mkDerivation
, fetchFromGitHub
, cmake
, fdk_aac
@@ -34,7 +35,7 @@
let
optional = stdenv.lib.optional;
in stdenv.mkDerivation rec {
in mkDerivation rec {
name = "obs-studio-${version}";
version = "23.2.1";

View file

@@ -16,7 +16,7 @@
postPatch = "substituteInPlace Makefile --replace libsystemd-daemon libsystemd";
buildInputs = [ fontconfig libjpeg libcap freetype ]
buildInputs = [ fontconfig libjpeg libcap freetype perl ]
++ lib.optional enableSystemd systemd
++ lib.optional enableBidi fribidi;

View file

@@ -9,7 +9,7 @@
, libass, libva, libdvbpsi, libdc1394, libraw1394, libopus
, libvdpau, libsamplerate, live555, fluidsynth, wayland, wayland-protocols
, onlyLibVLC ? false
, withQt5 ? true, qtbase ? null, qtsvg ? null, qtx11extras ? null
, withQt5 ? true, qtbase ? null, qtsvg ? null, qtx11extras ? null, wrapQtAppsHook ? null
, jackSupport ? false
, removeReferencesTo
, chromecastSupport ? true, protobuf, libmicrodns
@@ -21,7 +21,7 @@
with stdenv.lib;
assert (withQt5 -> qtbase != null && qtsvg != null && qtx11extras != null);
assert (withQt5 -> qtbase != null && qtsvg != null && qtx11extras != null && wrapQtAppsHook != null);
stdenv.mkDerivation rec {
name = "vlc-${version}";
@@ -49,7 +49,8 @@ stdenv.mkDerivation rec {
++ optional jackSupport libjack2
++ optionals chromecastSupport [ protobuf libmicrodns ];
nativeBuildInputs = [ autoreconfHook perl pkgconfig removeReferencesTo ];
nativeBuildInputs = [ autoreconfHook perl pkgconfig removeReferencesTo ]
++ optionals withQt5 [ wrapQtAppsHook ];
enableParallelBuilding = true;

View file

@@ -5,13 +5,13 @@
buildGoPackage rec {
name = "podman-${version}";
version = "1.3.2";
version = "1.4.4";
src = fetchFromGitHub {
owner = "containers";
repo = "libpod";
rev = "v${version}";
sha256 = "1j5n08273igj6wm9rrwks9nnklv91060bn1yv3ak78csxc05whs3";
sha256 = "13qgrvqawrrz4apdcds4amkljyjzx056545962wk8p0d291hqv5a";
};
goPackagePath = "github.com/containers/libpod";
@@ -39,7 +39,7 @@ buildGoPackage rec {
homepage = https://podman.io/;
description = "A program for managing pods, containers and container images";
license = licenses.asl20;
maintainers = with maintainers; [ vdemeester ];
maintainers = with maintainers; [ vdemeester saschagrunert ];
platforms = platforms.linux;
};
}

View file

@@ -4,13 +4,13 @@
stdenv.mkDerivation rec {
name = "bspwm-${version}";
version = "0.9.7";
version = "0.9.8";
src = fetchFromGitHub {
owner = "baskerville";
repo = "bspwm";
rev = version;
sha256 = "17cfvbrvzwwr9r72xgpn144k45xavzi0hnl2qqp9lhxflvirac0c";
sha256 = "1vc4pdm4fwb5gz7hyzwvjqkx5087f0vrw11898nq1s7kxzl2lhbx";
};
buildInputs = [ libxcb libXinerama xcbutil xcbutilkeysyms xcbutilwm ];

View file

@@ -9,19 +9,19 @@ in
rustPlatform.buildRustPackage rec {
name = "dwm-status-${version}";
version = "1.6.0";
version = "1.6.2";
src = fetchFromGitHub {
owner = "Gerschtli";
repo = "dwm-status";
rev = version;
sha256 = "02gvlxv6ylx4mdkf59crm2zyahiz1zd4cr5zz29dnhx7r7738i9a";
sha256 = "16vf7val1isc4227amng2ap9af34xa2va23dxv43px006xhrar78";
};
nativeBuildInputs = [ makeWrapper pkgconfig ];
buildInputs = [ dbus gdk-pixbuf libnotify xorg.libX11 ];
cargoSha256 = "1r2wczfkdpvjc7iylwajkminraaz1ix6n724in0dvv5klfcdxlxb";
cargoSha256 = "0pprf8509d321azg2l51lpxylgpk7290y38z9p5hxgkcwhrhrcss";
postInstall = lib.optionalString (bins != []) ''
wrapProgram $out/bin/dwm-status --prefix "PATH" : "${stdenv.lib.makeBinPath bins}"

View file

@@ -4,7 +4,7 @@
, lib
}:
args@{ name, bazelFlags ? [], bazelTarget, buildAttrs, fetchAttrs, ... }:
args@{ name, bazelFlags ? [], bazelBuildFlags ? [], bazelFetchFlags ? [], bazelTarget, buildAttrs, fetchAttrs, ... }:
let
fArgs = removeAttrs args [ "buildAttrs" "fetchAttrs" ];
@@ -12,11 +12,11 @@ let
fFetchAttrs = fArgs // removeAttrs fetchAttrs [ "sha256" ];
in stdenv.mkDerivation (fBuildAttrs // {
inherit name bazelFlags bazelTarget;
inherit name bazelFlags bazelBuildFlags bazelFetchFlags bazelTarget;
deps = stdenv.mkDerivation (fFetchAttrs // {
name = "${name}-deps";
inherit bazelFlags bazelTarget;
inherit bazelFlags bazelBuildFlags bazelFetchFlags bazelTarget;
nativeBuildInputs = fFetchAttrs.nativeBuildInputs or [] ++ [ bazel ];
@@ -49,6 +49,7 @@ in stdenv.mkDerivation (fBuildAttrs // {
fetch \
--loading_phase_threads=1 \
$bazelFlags \
$bazelFetchFlags \
$bazelTarget
runHook postBuild
@@ -60,13 +61,10 @@ in stdenv.mkDerivation (fBuildAttrs // {
# Remove all built-in external workspaces; Bazel will recreate them when building
rm -rf $bazelOut/external/{bazel_tools,\@bazel_tools.marker}
rm -rf $bazelOut/external/{embedded_jdk,\@embedded_jdk.marker}
rm -rf $bazelOut/external/{local_*,\@local_*}
rm -rf $bazelOut/external/{local_*,\@local_*.marker}
# Patching markers to make them deterministic
find $bazelOut/external -name '@*\.marker' -exec sed -i \
-e 's, -\?[0-9][0-9]*$, 1,' \
-e '/^ENV:TMP.*/d' \
'{}' \;
# Clear markers
find $bazelOut/external -name '@*\.marker' -exec sh -c 'echo > {}' \;
# Remove all vcs files
rm -rf $(find $bazelOut/external -type d -name .git)
@@ -152,6 +150,7 @@ in stdenv.mkDerivation (fBuildAttrs // {
"''${host_linkopts[@]}" \
'' + ''
$bazelFlags \
$bazelBuildFlags \
$bazelTarget
runHook postBuild
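
For orientation, a minimal sketch of a package passing the newly threaded-through flag attributes to this rule. The helper name buildBazelPackage, the target, and all flag and hash values are placeholders/assumptions; only the attribute names (bazelTarget, bazelBuildFlags, bazelFetchFlags, fetchAttrs, buildAttrs) are taken from the rule itself.

# Hypothetical consumer of the rule above; every value is a placeholder.
buildBazelPackage {
  name = "example-tool-0.1";
  src = ./.;
  bazelTarget = "//src:example-tool";
  bazelBuildFlags = [ "--copt=-O2" ];                 # forwarded only to `bazel build`
  bazelFetchFlags = [ "--loading_phase_threads=4" ];  # forwarded only to `bazel fetch`
  fetchAttrs.sha256 = "0000000000000000000000000000000000000000000000000000";
  buildAttrs.installPhase = ''
    install -Dm755 bazel-bin/src/example-tool $out/bin/example-tool
  '';
}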

View file

@@ -1,37 +1,41 @@
{
symlinkJoin,
cacert,
callPackage,
closureInfo,
coreutils,
docker,
e2fsprogs,
findutils,
go,
jshon,
jq,
jshon,
lib,
pkgs,
pigz,
moreutils,
nix,
runCommand,
pigz,
referencesByPopularity,
rsync,
runCommand,
runtimeShell,
shadow,
skopeo,
stdenv,
storeDir ? builtins.storeDir,
substituteAll,
symlinkJoin,
utillinux,
vmTools,
writeReferencesToFile,
referencesByPopularity,
writeScript,
writeText,
closureInfo,
substituteAll,
runtimeShell
}:
# WARNING: this API is unstable and may be subject to backwards-incompatible changes in the future.
rec {
examples = import ./examples.nix {
inherit pkgs buildImage pullImage shadowSetup buildImageWithNixDb;
examples = callPackage ./examples.nix {
inherit buildImage pullImage shadowSetup buildImageWithNixDb;
};
pullImage = let
@@ -57,13 +61,13 @@ rec {
inherit imageDigest;
imageName = finalImageName;
imageTag = finalImageTag;
impureEnvVars = pkgs.stdenv.lib.fetchers.proxyImpureEnvVars;
impureEnvVars = stdenv.lib.fetchers.proxyImpureEnvVars;
outputHashMode = "flat";
outputHashAlgo = "sha256";
outputHash = sha256;
nativeBuildInputs = lib.singleton (pkgs.skopeo);
SSL_CERT_FILE = "${pkgs.cacert.out}/etc/ssl/certs/ca-bundle.crt";
nativeBuildInputs = lib.singleton skopeo;
SSL_CERT_FILE = "${cacert.out}/etc/ssl/certs/ca-bundle.crt";
sourceURL = "docker://${imageName}@${imageDigest}";
destNameTag = "${finalImageName}:${finalImageTag}";
@@ -156,7 +160,8 @@ rec {
postMount ? "",
postUmount ? ""
}:
vmTools.runInLinuxVM (
let
result = vmTools.runInLinuxVM (
runCommand name {
preVM = vmTools.createEmptyImage {
size = diskSize;
@@ -166,8 +171,6 @@ rec {
nativeBuildInputs = [ utillinux e2fsprogs jshon rsync jq ];
} ''
rm -rf $out
mkdir disk
mkfs /dev/${vmTools.hd}
mount /dev/${vmTools.hd} disk
@@ -250,6 +253,12 @@ rec {
${postUmount}
'');
in
runCommand name {} ''
mkdir -p $out
cd ${result}
cp layer.tar json VERSION $out
'';
exportImage = { name ? fromImage.name, fromImage, fromImageName ? null, fromImageTag ? null, diskSize ? 1024 }:
runWithOverlay {
@@ -489,7 +498,7 @@ rec {
(cd layer; ${extraCommandsScript})
echo "Packing layer..."
mkdir $out
mkdir -p $out
tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" -cf $out/layer.tar .
# Compute the tar checksum and add it to the output json.
@@ -670,7 +679,7 @@ rec {
extraCommands;
};
result = runCommand "docker-image-${baseName}.tar.gz" {
nativeBuildInputs = [ jshon pigz coreutils findutils jq ];
nativeBuildInputs = [ jshon pigz coreutils findutils jq moreutils ];
# Image name and tag must be lowercase
imageName = lib.toLower name;
imageTag = if tag == null then "" else lib.toLower tag;
@@ -784,7 +793,7 @@ rec {
# originally this used `sed -i "1i$layerID" layer-list`, but
# would fail if layer-list was completely empty.
echo "$layerID/layer.tar"
) | ${pkgs.moreutils}/bin/sponge layer-list
) | sponge layer-list
# Create image json and image manifest
imageJson=$(cat ${baseJson} | jq ". + {\"rootfs\": {\"diff_ids\": [], \"type\": \"layers\"}}")
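
As a usage sketch for the pullImage hunk above: a hypothetical call using the parameter names visible in this file (imageName, imageDigest, sha256, finalImageName, finalImageTag). The attribute path dockerTools and all digest/hash values are assumptions and placeholders, not real data.

# Hypothetical invocation; digest and hash are placeholders.
dockerTools.pullImage {
  imageName = "nixos/nix";
  imageDigest = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
  sha256 = "0000000000000000000000000000000000000000000000000000";
  finalImageName = "nixos/nix";
  finalImageTag = "latest";
}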

View file

@@ -14,6 +14,7 @@
, cargoDepsHook ? ""
, cargoBuildFlags ? []
, buildType ? "release"
, meta ? {}
, cargoVendorDir ? null
, ... } @ args:
@@ -45,7 +46,6 @@ let
ccForHost="${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc";
cxxForHost="${stdenv.cc}/bin/${stdenv.cc.targetPrefix}c++";
releaseDir = "target/${stdenv.hostPlatform.config}/${buildType}";
in stdenv.mkDerivation (args // {
inherit cargoDeps;
@@ -103,7 +103,7 @@ in stdenv.mkDerivation (args // {
"CC_${stdenv.hostPlatform.config}"="${ccForHost}" \
"CXX_${stdenv.hostPlatform.config}"="${cxxForHost}" \
cargo build \
--${buildType} \
${stdenv.lib.optionalString (buildType == "release") "--release"} \
--target ${stdenv.hostPlatform.config} \
--frozen ${concatStringsSep " " cargoBuildFlags}
)
@@ -147,4 +147,9 @@ in stdenv.mkDerivation (args // {
'';
passthru = { inherit cargoDeps; } // (args.passthru or {});
meta = {
# default to Rust's platforms
platforms = rustc.meta.platforms;
} // meta;
})
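
To illustrate the two changes above (the conditional --release flag and the defaulted meta.platforms), a minimal sketch of a hypothetical package follows; the name, source, and hashes are placeholders, and cargoSha256 mirrors the usage seen in the dwm-status expression earlier in this diff.

# Hypothetical package; src and hashes are placeholders.
rustPlatform.buildRustPackage rec {
  name = "example-cli-${version}";
  version = "0.1.0";
  src = ./.;
  cargoSha256 = "0000000000000000000000000000000000000000000000000000";
  buildType = "debug";   # no longer turns into an invalid `cargo build --debug`
  meta = {
    description = "Example CLI";
    # platforms is intentionally omitted: it now defaults to rustc.meta.platforms
  };
}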

View file

@@ -1,30 +1,45 @@
{ lib, fetchFromGitHub }:
let
version = "5.9.0";
in fetchFromGitHub rec {
name = "font-awesome-${version}";
font-awesome = { version, sha256, rev ? version}: fetchFromGitHub rec {
name = "font-awesome-${version}";
owner = "FortAwesome";
repo = "Font-Awesome";
rev = version;
postFetch = ''
tar xf $downloadedFile --strip=1
install -m444 -Dt $out/share/fonts/opentype otfs/*.otf
'';
owner = "FortAwesome";
repo = "Font-Awesome";
inherit rev;
sha256 = "0sz7mn7g968vp5hszs05grpphd7zr3073az8lyy1lj0096zvjjii";
meta = with lib; {
description = "Font Awesome - OTF font";
longDescription = ''
Font Awesome gives you scalable vector icons that can instantly be customized.
This package includes only the OTF font. For full CSS etc. see the project website.
postFetch = ''
tar xf $downloadedFile --strip=1
install -m444 -Dt $out/share/fonts/opentype {fonts,otfs}/*.otf
'';
homepage = http://fortawesome.github.io/Font-Awesome/;
license = licenses.ofl;
platforms = platforms.all;
maintainers = with maintainers; [ abaldeau ];
inherit sha256;
meta = with lib; {
description = "Font Awesome - OTF font";
longDescription = ''
Font Awesome gives you scalable vector icons that can instantly be customized.
This package includes only the OTF font. For full CSS etc. see the project website.
'';
homepage = "http://fortawesome.github.io/Font-Awesome/";
license = licenses.ofl;
platforms = platforms.all;
maintainers = with maintainers; [ abaldeau johnazoidberg ];
};
};
in {
# Keeping version 4 because version 5 is incompatible for some icons. That
# means that projects which depend on it need to actively convert the
# symbols. See:
# https://github.com/greshake/i3status-rust/issues/130
# https://fontawesome.com/how-to-use/on-the-web/setup/upgrading-from-version-4
v4 = font-awesome {
version = "4.7.0";
rev = "v4.7.0";
sha256 = "1j8i32dq6rrlv3kf2hnq81iqks06kczaxjks7nw3zyq1231winm9";
};
v5 = font-awesome {
version = "5.10.0";
sha256 = "11nga1drlpkrmw307ga6plbj5z1b70cnckr465z8z6vkbcd6jkv3";
};
}
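
A short, hypothetical consumer of the split above: downstream configurations now pick a major version explicitly. The top-level attribute names font-awesome_4 and font-awesome_5 are an assumption about how v4/v5 end up being exposed; this hunk itself only defines the inner attribute set.

# Hypothetical NixOS configuration fragment; attribute names are assumed.
{ pkgs, ... }:
{
  fonts.fonts = with pkgs; [
    font-awesome_4   # icon names still expected by e.g. older i3status-rust setups
    font-awesome_5
  ];
}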

View file

@@ -19,13 +19,13 @@
stdenv.mkDerivation rec {
pname = "plata-theme";
version = "0.8.7";
version = "0.8.8";
src = fetchFromGitLab {
owner = "tista500";
repo = "plata-theme";
rev = version;
sha256 = "1rn51yj7f7bclvrwwqwid4z9cpap4yd0zw0xs08c36zcjmr28426";
sha256 = "1xb28s67lnsphj97r15jxlfgydyrxdby1d2z5y3g9wniw6z19i9n";
};
preferLocalBuild = true;

View file

@@ -345,8 +345,6 @@ lib.makeScope pkgs.newScope (self: with self; {
nautilus-python = callPackage ./misc/nautilus-python { };
pidgin-im-gnome-shell-extension = callPackage ./misc/pidgin { };
gtkhtml = callPackage ./misc/gtkhtml { enchant = pkgs.enchant1; };
pomodoro = callPackage ./misc/pomodoro { };
@@ -398,4 +396,6 @@ lib.makeScope pkgs.newScope (self: with self; {
gtk = gtk3;
gtkmm = gtkmm3;
rest = librest;
pidgin-im-gnome-shell-extension = pkgs.gnomeExtensions.pidgin-im-integration; # added 2019-08-01
})

View file

@@ -0,0 +1,31 @@
{ stdenv, fetchFromGitHub, glib }:
stdenv.mkDerivation rec {
pname = "gnome-shell-extension-pidgin-im-integration";
version = "32";
src = fetchFromGitHub {
owner = "muffinmad";
repo = "pidgin-im-gnome-shell-extension";
rev = "v${version}";
sha256 = "1jyg8r0s1v83sgg6y0jbsj2v37mglh8rvd8vi27fxnjq9xmg8kpc";
};
dontConfigure = true;
dontBuild = true;
installPhase = ''
share_dir="$prefix/share"
extensions_dir="$share_dir/gnome-shell/extensions/pidgin@muffinmad"
mkdir -p "$extensions_dir"
mv *.js metadata.json dbus.xml schemas locale "$extensions_dir"
'';
meta = with stdenv.lib; {
homepage = https://github.com/muffinmad/pidgin-im-gnome-shell-extension;
description = "Make Pidgin IM conversations appear in the Gnome Shell message tray";
license = licenses.gpl2;
platforms = platforms.linux;
maintainers = with maintainers; [ ];
};
}

View file

@@ -1,42 +0,0 @@
{ stdenv, fetchFromGitHub, glib }:
stdenv.mkDerivation rec {
version = "1.0.1";
basename = "pidgin-im-gnome-shell-extension";
name = "${basename}-${version}";
src = fetchFromGitHub {
owner = "muffinmad";
repo = "${basename}";
rev = "v${version}";
sha256 = "1567s2sfqig4jw0nrn134f5vkx0yq31q044grv3xk4vpl1f3z2lr";
};
buildInputs = [ glib ];
configurePhase = "";
buildPhase = "";
installPhase = ''
share_dir="$prefix/share"
extensions_dir="$share_dir/gnome-shell/extensions/pidgin@muffinmad"
mkdir -p "$extensions_dir"
mv *.js metadata.json dbus.xml gnome-shell-extension-pidgin.pot "$extensions_dir"
schemas_dir="$share_dir/gsettings-schemas/${name}/glib-2.0/schemas"
mkdir -p "$schemas_dir"
mv schemas/* "$schemas_dir" # fix Emacs syntax highlighting: */
glib-compile-schemas "$schemas_dir"
locale_dir="$share_dir/locale"
mkdir -p "$locale_dir"
mv locale/* $locale_dir # fix Emacs syntax highlighting: */
'';
meta = with stdenv.lib; {
homepage = https://github.com/muffinmad/pidgin-im-gnome-shell-extension;
description = "Make Pidgin IM conversations appear in the Gnome Shell message tray";
license = licenses.gpl2;
platforms = platforms.linux;
maintainers = with maintainers; [ ];
};
}
