Merge remote-tracking branch 'origin/master' into staging-next

Conflicts:
	pkgs/development/tools/rust/cargo-cache/default.nix
	pkgs/development/tools/rust/cargo-embed/default.nix
	pkgs/development/tools/rust/cargo-flash/default.nix
	pkgs/servers/nosql/influxdb2/default.nix
This commit is contained in:
Jonathan Ringer 2021-05-17 07:01:38 -07:00
commit c227fb4b17
No known key found for this signature in database
GPG key ID: 5C841D3CFDFEC4E0
113 changed files with 1041 additions and 252 deletions

View file

@ -50,3 +50,50 @@ Many more commands wrap `writeTextFile` including `writeText`, `writeTextDir`, `
## `symlinkJoin` {#trivial-builder-symlinkJoin}
This can be used to put many derivations into the same directory structure. It works by creating a new derivation and adding symlinks to each of the paths listed. It expects two arguments, `name` and `paths`. `name` is the name used in the Nix store path for the created derivation. `paths` is a list of paths that will be symlinked. These paths can point to Nix store derivations or to any subdirectory contained within them.
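For example, a minimal sketch (assuming `hello` and `figlet` are ordinary Nixpkgs packages in scope, as elsewhere in this chapter) might join two packages into one directory tree:
```nix
# Sketch only: both inputs are assumed to be regular Nixpkgs packages.
symlinkJoin {
  name = "hello-and-figlet";     # name used for the resulting store path
  paths = [ hello figlet ];      # every file below these paths is symlinked
}
```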
## `writeReferencesToFile` {#trivial-builder-writeReferencesToFile}
Writes the closure of the given path, that is, the path itself together with all of its transitive dependencies, to a file.
This produces the equivalent of `nix-store -q --requisites`.
For example,
```nix
writeReferencesToFile (writeScriptBin "hi" ''${hello}/bin/hello'')
```
produces an output path `/nix/store/<hash>-runtime-deps` containing
```
/nix/store/<hash>-hello-2.10
/nix/store/<hash>-hi
/nix/store/<hash>-libidn2-2.3.0
/nix/store/<hash>-libunistring-0.9.10
/nix/store/<hash>-glibc-2.32-40
```
You can see that this includes `hi`, the original input path; `hello`, which is a direct reference; and also the other paths that are indirectly required to run `hello`.
## `writeDirectReferencesToFile` {#trivial-builder-writeDirectReferencesToFile}
Writes the set of references of the given path, that is, its immediate dependencies, to the output file.
This produces the equivalent of `nix-store -q --references`.
For example,
```nix
writeDirectReferencesToFile (writeScriptBin "hi" ''${hello}/bin/hello'')
```
produces an output path `/nix/store/<hash>-runtime-references` containing
```
/nix/store/<hash>-hello-2.10
```
but none of `hello`'s dependencies, because those are not referenced directly
by `hi`'s output.
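If you need the resulting list at build time, a hedged sketch (using `runCommand` and assuming the same `hi` script as above) could consume the file like this:
```nix
# Sketch only: copy the list of direct references into another derivation's output.
runCommand "hi-direct-refs" {
  refs = writeDirectReferencesToFile (writeScriptBin "hi" "${hello}/bin/hello");
} ''
  cat $refs > $out
''
```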

View file

@ -5,7 +5,7 @@ support for importing Dhall expressions, which is documented here:
* [`dhall-lang.org` - Installing packages](https://docs.dhall-lang.org/tutorials/Language-Tour.html#installing-packages)
## Remote imports
## Remote imports {#ssec-dhall-remote-imports}
Nixpkgs bypasses Dhall's support for remote imports using Dhall's
semantic integrity checks. Specifically, any Dhall import can be protected by
@ -32,7 +32,7 @@ example, the Prelude Dhall package uses `pkgs.fetchFromGitHub` to fetch the
to fetch Dhall code ensures that Dhall packages built using Nix remain pure and
also behave well when built within a sandbox.
## Packaging a Dhall expression from scratch
## Packaging a Dhall expression from scratch {#ssec-dhall-packaging-expression}
We can illustrate how Nixpkgs integrates Dhall by beginning from the following
trivial Dhall expression with one dependency (the Prelude):
@ -117,7 +117,7 @@ in
$ nix build --file ./example.nix dhallPackages.true
```
## Contents of a Dhall package
## Contents of a Dhall package {#ssec-dhall-package-contents}
The above package produces the following directory tree:
@ -224,7 +224,7 @@ $ cat ./result/source.dhall
```
## Packaging functions
## Packaging functions {#ssec-dhall-packaging-functions}
We already saw an example of using `buildDhallPackage` to create a Dhall
package from a single file, but most Dhall packages consist of more than one
@ -297,7 +297,7 @@ terms of `buildDhallPackage` that accepts the following arguments:
Additionally, `buildDhallGitHubPackage` accepts the same arguments as
`fetchFromGitHub`, such as `sha256` or `fetchSubmodules`.
## `dhall-to-nixpkgs`
## `dhall-to-nixpkgs` {#ssec-dhall-dhall-to-nixpkgs}
You can use the `dhall-to-nixpkgs` command-line utility to automate
packaging Dhall code. For example:
@ -342,7 +342,7 @@ $ dhall-to-nixpkgs directory ~/proj/dhall-semver
}
```
## Overriding dependency versions
## Overriding dependency versions {#ssec-dhall-overriding-dependency-versions}
Suppose that we change our `true.dhall` example expression to depend on an older
version of the Prelude (19.0.0):
@ -415,7 +415,7 @@ like this:
};
```
## Overrides
## Overrides {#ssec-dhall-overrides}
You can override any of the arguments to `buildDhallGitHubPackage` or
`buildDhallDirectoryPackage` using the `overridePackage` attribute of a package.
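As a hedged sketch, and since `buildDhallGitHubPackage` forwards `fetchFromGitHub` arguments such as `sha256` (see above), re-pinning a GitHub-hosted package could look roughly like this; the attribute name `Prelude`, the revision, and the hash are placeholders for illustration only:
```nix
# Sketch only: override the fetchFromGitHub-style arguments the package was built with.
dhallPackages.Prelude.overridePackage {
  rev = "v21.0.0";                                                    # placeholder revision
  sha256 = "0000000000000000000000000000000000000000000000000000";   # placeholder hash
}
```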

View file

@ -2511,6 +2511,12 @@
githubId = 1316469;
name = "Naomi Morse";
};
dlesl = {
email = "dlesl@dlesl.com";
github = "dlesl";
githubId = 28980797;
name = "David Leslie";
};
dmalikov = {
email = "malikov.d.y@gmail.com";
github = "dmalikov";

View file

@ -54,6 +54,8 @@ in
services.dbus.packages = [ pkgs.dconf ];
systemd.packages = [ pkgs.dconf ];
# For dconf executable
environment.systemPackages = [ pkgs.dconf ];

View file

@ -10,15 +10,10 @@ let
blacklist = cfg.caCertificateBlacklist;
};
caCertificates = pkgs.runCommand "ca-certificates.crt"
{ files =
cfg.certificateFiles ++
[ (builtins.toFile "extra.crt" (concatStringsSep "\n" cfg.certificates)) ];
caCertificates = pkgs.runCommand "ca-certificates.crt" {
files = cfg.certificateFiles ++ [ (builtins.toFile "extra.crt" (concatStringsSep "\n" cfg.certificates)) ];
preferLocalBuild = true;
}
''
cat $files > $out
'';
} "awk 1 $files > $out"; # awk ensures a newline between each pair of consecutive files
in

View file

@ -159,7 +159,7 @@ in
For more information on how to specify the target
and on which privileges exist, see the
<link xlink:href="https://www.postgresql.org/docs/current/sql-grant.html">GRANT syntax</link>.
The attributes are used as <code>GRANT ''${attrName} ON ''${attrValue}</code>.
The attributes are used as <code>GRANT ''${attrValue} ON ''${attrName}</code>.
'';
example = literalExample ''
{

View file

@ -331,7 +331,7 @@ in {
PrivateMounts = true;
# System Call Filtering
SystemCallArchitectures = "native";
SystemCallFilter = "~@clock @cpu-emulation @debug @keyring @memlock @module @mount @obsolete @privileged @raw-io @reboot @resources @setuid @swap";
SystemCallFilter = "~@cpu-emulation @debug @keyring @memlock @mount @obsolete @privileged @resources @setuid";
};
};
};

View file

@ -131,7 +131,7 @@ in {
env = (pkgs.python3.withPackages (pythonPackages: with pythonPackages; [
ipykernel
pandas
scikitlearn
scikit-learn
]));
in {
displayName = "Python 3 for machine learning";

View file

@ -117,7 +117,7 @@ in {
env = (pkgs.python3.withPackages (pythonPackages: with pythonPackages; [
ipykernel
pandas
scikitlearn
scikit-learn
]));
in {
displayName = "Python 3 for machine learning";

View file

@ -31,16 +31,6 @@ in
config = mkIf cfg.enable {
users = {
groups.lm_sensors = {};
users.fancontrol = {
isSystemUser = true;
group = "lm_sensors";
description = "fan speed controller";
};
};
systemd.services.fancontrol = {
documentation = [ "man:fancontrol(8)" ];
description = "software fan control";
@ -49,8 +39,6 @@ in
serviceConfig = {
ExecStart = "${pkgs.lm_sensors}/sbin/fancontrol ${configFile}";
Group = "lm_sensors";
User = "fancontrol";
};
};
};

View file

@ -92,9 +92,7 @@ in
SystemCallErrorNumber = "EPERM";
SystemCallFilter = [
"@system-service"
"~@chown" "~@cpu-emulation" "~@debug" "~@keyring" "~@memlock" "~@module"
"~@obsolete" "~@privileged" "~@setuid"
"~@cpu-emulation" "~@debug" "~@keyring" "~@memlock" "~@obsolete" "~@privileged" "~@setuid"
];
};
};

View file

@ -117,7 +117,7 @@ in {
PrivateMounts = true;
# System Call Filtering
SystemCallArchitectures = "native";
SystemCallFilter = "~@clock @cpu-emulation @debug @module @mount @obsolete @privileged @raw-io @reboot @resources @swap";
SystemCallFilter = "~@cpu-emulation @debug @mount @obsolete @privileged @resources";
};
};
};

View file

@ -72,9 +72,7 @@ in
RuntimeDirectoryMode = "700";
SystemCallFilter = [
"@system-service"
"~@aio" "~@chown" "~@keyring" "~@memlock"
"~@privileged" "~@resources" "~@setuid"
"~@sync" "~@timer"
"~@aio" "~@keyring" "~@memlock" "~@privileged" "~@resources" "~@setuid" "~@sync" "~@timer"
];
SystemCallArchitectures = "native";
SystemCallErrorNumber = "EPERM";

View file

@ -86,10 +86,7 @@ in {
SystemCallErrorNumber = "EPERM";
SystemCallFilter = [
"@system-service"
"~@chown" "~@cpu-emulation" "~@debug" "~@keyring" "~@memlock"
"~@module" "~@obsolete" "~@privileged" "~@raw-io"
"~@resources" "~@setuid"
"~@cpu-emulation" "~@debug" "~@keyring" "~@memlock" "~@obsolete" "~@privileged" "~@resources" "~@setuid"
];
};
};

View file

@ -41,7 +41,6 @@ in {
As an example:
<programlisting>
security.acme.certs."example.com".allowKeysForGroup = true;
systemd.services.molly-brown.serviceConfig.SupplementaryGroups =
[ config.security.acme.certs."example.com".group ];
</programlisting>

View file

@ -859,7 +859,7 @@ in
PrivateMounts = true;
# System Call Filtering
SystemCallArchitectures = "native";
SystemCallFilter = "~@chown @cpu-emulation @debug @keyring @ipc @module @mount @obsolete @privileged @raw-io @reboot @setuid @swap";
SystemCallFilter = "~@cpu-emulation @debug @keyring @ipc @mount @obsolete @privileged @setuid";
};
};
@ -867,8 +867,9 @@ in
source = configFile;
};
# postRun hooks on cert renew can't be used to restart Nginx since renewal
# runs as the unprivileged acme user. sslTargets are added to wantedBy + before
# This service waits for all certificates to be available
# before reloading nginx configuration.
# sslTargets are added to wantedBy + before
# which allows the acme-finished-$cert.target to signify the successful updating
# of certs end-to-end.
systemd.services.nginx-config-reload = let

View file

@ -309,7 +309,7 @@ in
"mount-pstore" = {
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.utillinux}/bin/mount -t pstore -o nosuid,noexec,nodev pstore /sys/fs/pstore";
ExecStart = "${pkgs.util-linux}/bin/mount -t pstore -o nosuid,noexec,nodev pstore /sys/fs/pstore";
ExecStartPost = pkgs.writeShellScript "wait-for-pstore.sh" ''
set -eu
TRIES=0

View file

@ -9,13 +9,13 @@
mkDerivation rec {
pname = "spotify-qt";
version = "3.5";
version = "3.6";
src = fetchFromGitHub {
owner = "kraxarn";
repo = pname;
rev = "v${version}";
sha256 = "1bgd0q4sbbww3lbrx2zwgaz0sl7qh195s4kvgsq16gv7ij82bskn";
sha256 = "mKHyE6ZffMYYRLMpzMX53chyJyWxhTAaGvtBI3l6wkI=";
};
buildInputs = [ libxcb qtbase qtsvg ];

View file

@ -2,13 +2,13 @@
mkDerivation rec {
pname = "ghostwriter";
version = "2.0.0";
version = "2.0.1";
src = fetchFromGitHub {
owner = "wereturtle";
repo = pname;
rev = version;
sha256 = "sha256-5O2W7ZQeDkNzwi6t9MfNbv4fmNvak1AcMnzJTE1F9L8=";
sha256 = "sha256-bNVhYwX60F3lrP9UmZSntfz83vbmHe9tu/4nUgzUWR4=";
};
nativeBuildInputs = [ qmake pkg-config qttools ];

View file

@ -23,6 +23,7 @@
, qtsvg
, qtx11extras
, quazip
, readline
, wrapQtAppsHook
, yubikey-personalization
, zlib
@ -51,13 +52,13 @@ stdenv.mkDerivation rec {
sha256 = "02ajfkw818cmalvkl0kqvza85rgdgs59kw2v7b3c4v8kv00c41j3";
};
NIX_CFLAGS_COMPILE = lib.optionalString stdenv.cc.isClang [
NIX_CFLAGS_COMPILE = optionalString stdenv.cc.isClang [
"-Wno-old-style-cast"
"-Wno-error"
"-D__BIG_ENDIAN__=${if stdenv.isBigEndian then "1" else "0"}"
];
NIX_LDFLAGS = lib.optionalString stdenv.isDarwin "-rpath ${libargon2}/lib";
NIX_LDFLAGS = optionalString stdenv.isDarwin "-rpath ${libargon2}/lib";
patches = [
./darwin.patch
@ -108,12 +109,14 @@ stdenv.mkDerivation rec {
qtbase
qtsvg
qtx11extras
readline
yubikey-personalization
zlib
]
++ lib.optional withKeePassKeeShareSecure quazip
++ lib.optional stdenv.isDarwin qtmacextras
++ lib.optional (stdenv.isDarwin && withKeePassTouchID) darwin.apple_sdk.frameworks.LocalAuthentication;
++ optional withKeePassKeeShareSecure quazip
++ optional stdenv.isDarwin qtmacextras
++ optional (stdenv.isDarwin && withKeePassTouchID)
darwin.apple_sdk.frameworks.LocalAuthentication;
preFixup = optionalString stdenv.isDarwin ''
# Make it work without Qt in PATH.
@ -123,8 +126,14 @@ stdenv.mkDerivation rec {
passthru.tests = nixosTests.keepassxc;
meta = {
description = "Password manager to store your passwords safely and auto-type them into your everyday websites and applications";
longDescription = "A community fork of KeePassX, which is itself a port of KeePass Password Safe. The goal is to extend and improve KeePassX with new features and bugfixes to provide a feature-rich, fully cross-platform and modern open-source password manager. Accessible via native cross-platform GUI, CLI, and browser integration with the KeePassXC Browser Extension (https://github.com/keepassxreboot/keepassxc-browser).";
description = "Offline password manager with many features.";
longDescription = ''
A community fork of KeePassX, which is itself a port of KeePass Password Safe.
The goal is to extend and improve KeePassX with new features and bugfixes,
to provide a feature-rich, fully cross-platform and modern open-source password manager.
Accessible via a native cross-platform GUI and CLI, with browser integration
using the KeePassXC Browser Extension (https://github.com/keepassxreboot/keepassxc-browser).
'';
homepage = "https://keepassxc.org/";
license = licenses.gpl2Plus;
maintainers = with maintainers; [ jonafato turion ];

View file

@ -1,18 +1,36 @@
{ lib, stdenv, fetchFromGitHub, cmake, pkg-config, wxGTK, gtk2, sfml, fluidsynth, curl, freeimage, ftgl, glew, zip }:
{ lib, stdenv, fetchFromGitHub
, cmake
, pkg-config
, wxGTK
, sfml
, fluidsynth
, curl
, freeimage
, ftgl
, glew
, zip
, lua
, fmt
, mpg123
}:
stdenv.mkDerivation {
name = "slade-git-3.1.2.2018.01.29";
name = "slade-git-3.2.0.2021.05.13";
src = fetchFromGitHub {
owner = "sirjuddington";
repo = "SLADE";
rev = "f7409c504b40c4962f419038db934c32688ddd2e";
sha256 = "14icxiy0r9rlcc10skqs1ylnxm1f0f3irhzfmx4sazq0pjv5ivld";
rev = "d2e249c89062a44c912a9b86951526edc8735ba0";
sha256 = "08dsvx7m7c97jm8fxzivmi1fr47hj53y0lv57clqc35bh2gi62dg";
};
cmakeFlags = ["-DNO_WEBVIEW=1"];
cmakeFlags = [
"-DwxWidgets_CONFIG_EXECUTABLE=${wxGTK}/bin/wx-config"
"-DWX_GTK3=OFF"
"-DNO_WEBVIEW=1"
];
nativeBuildInputs = [ cmake pkg-config zip ];
buildInputs = [ wxGTK gtk2 sfml fluidsynth curl freeimage ftgl glew ];
buildInputs = [ wxGTK wxGTK.gtk sfml fluidsynth curl freeimage ftgl glew lua fmt mpg123 ];
meta = with lib; {
description = "Doom editor";

View file

@ -33,6 +33,7 @@
, libXrandr
, libXrender
, libXtst
, libxkbcommon
, libsecret
, libuuid
, libxcb
@ -81,6 +82,7 @@ let
libXrandr
libXrender
libXtst
libxkbcommon
libsecret
libuuid
libxcb
@ -97,11 +99,11 @@ let
in
stdenv.mkDerivation rec {
pname = "appgate-sdp";
version = "5.3.3";
version = "5.4.0";
src = fetchurl {
url = "https://bin.appgate-sdp.com/${lib.versions.majorMinor version}/client/appgate-sdp_${version}_amd64.deb";
sha256 = "1854m93mr2crg68zhh1pgwwis0dqdv0778wqrb8dz9sdz940rza8";
sha256 = "sha256-2DzZ5JnFGBeaHtDf7CAXb/qv6kVI+sYMW5Nc25E3eNA=";
};
dontConfigure = true;
@ -170,9 +172,14 @@ stdenv.mkDerivation rec {
patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" --set-rpath "$ORIGIN:$out/opt/appgate/service/:$out/opt/appgate/:${rpath}" $binary
done
# fail if there are missing dependencies
ldd $out/opt/appgate/appgate | grep -i 'not found' && exit 1
ldd $out/opt/appgate/service/appgateservice.bin | grep -i 'not found' && exit 1
ldd $out/opt/appgate/appgate-driver | grep -i 'not found' && exit 1
wrapProgram $out/opt/appgate/appgate-driver --prefix PATH : ${lib.makeBinPath [ iproute2 networkmanager dnsmasq ]}
wrapProgram $out/opt/appgate/linux/set_dns --set PYTHONPATH $PYTHONPATH
wrapProgram $out/bin/appgate --prefix PATH : ${xdg-utils}/bin
wrapProgram $out/bin/appgate --prefix PATH : ${lib.makeBinPath [ xdg-utils ]}
'';
meta = with lib; {
description = "Appgate SDP (Software Defined Perimeter) desktop client";

View file

@ -11,6 +11,13 @@ buildGoModule rec {
sha256 = "1iqgpmljqx6rhmvsir2675waj78amcfiw08knwvlmavjgpxx2ysw";
};
patches = [
# Use $HOME instead of the OS user database.
# Upstream PR: https://github.com/xetys/hetzner-kube/pull/346
# Unfortunately, the PR patch does not apply cleanly against this release.
./fix-home.patch
];
vendorSha256 = "1jh2f66ys6rmrrwrf5zqfprgcvziyq6l4z8bfqwxgf1ysnxx525h";
doCheck = false;
@ -25,6 +32,8 @@ buildGoModule rec {
];
postInstall = ''
# Need a writable home, because it fails if unable to write config.
export HOME=$TMP
$out/bin/hetzner-kube completion bash > hetzner-kube
$out/bin/hetzner-kube completion zsh > _hetzner-kube
installShellCompletion --zsh _hetzner-kube

View file

@ -0,0 +1,53 @@
diff --git a/cmd/cluster_kubeconfig.go b/cmd/cluster_kubeconfig.go
index 54cc0c9..fab288a 100644
--- a/cmd/cluster_kubeconfig.go
+++ b/cmd/cluster_kubeconfig.go
@@ -6,7 +6,7 @@ import (
"io/ioutil"
"log"
"os"
- "os/user"
+ "path/filepath"
"strings"
"github.com/spf13/cobra"
@@ -52,9 +52,8 @@ Example 4: hetzner-kube cluster kubeconfig -n my-cluster -p > my-conf.yaml # pri
} else {
fmt.Println("create file")
- usr, _ := user.Current()
- dir := usr.HomeDir
- path := fmt.Sprintf("%s/.kube", dir)
+ dir, _ := os.UserHomeDir()
+ path := filepath.Join(dir, ".kube")
if _, err := os.Stat(path); os.IsNotExist(err) {
os.MkdirAll(path, 0755)
diff --git a/cmd/config.go b/cmd/config.go
index ce0f3e5..a03c4ba 100644
--- a/cmd/config.go
+++ b/cmd/config.go
@@ -8,7 +8,6 @@ import (
"io/ioutil"
"log"
"os"
- "os/user"
"path/filepath"
"github.com/hetznercloud/hcloud-go/hcloud"
@@ -28,13 +27,8 @@ type AppSSHClient struct {
// NewAppConfig creates a new AppConfig struct using the locally saved configuration file. If no local
// configuration file is found a new config will be created.
func NewAppConfig() AppConfig {
- usr, err := user.Current()
- if err != nil {
- return AppConfig{}
- }
- if usr.HomeDir != "" {
- DefaultConfigPath = filepath.Join(usr.HomeDir, ".hetzner-kube")
- }
+ dir, _ := os.UserHomeDir()
+ DefaultConfigPath = filepath.Join(dir, ".hetzner-kube")
appConf := AppConfig{
Context: context.Background(),

View file

@ -436,6 +436,15 @@
"sha256": "0jhx9rap4128j8sfkvpp8lbdmvdba0rkd3nxvy38wr3n18m7v1xg",
"version": "1.2.0"
},
"hydra": {
"owner": "DeterminateSystems",
"provider-source-address": "registry.terraform.io/DeterminateSystems/hydra",
"repo": "terraform-provider-hydra",
"rev": "v0.1.0",
"sha256": "18c9j54fy1f2sfz317rlv8z7fb18bpc1a0baw1bgl72x5sgil5kv",
"vendorSha256": null,
"version": "0.1.0"
},
"ibm": {
"owner": "IBM-Cloud",
"provider-source-address": "registry.terraform.io/IBM-Cloud/ibm",

View file

@ -2,13 +2,13 @@
buildGoModule rec {
pname = "nextdns";
version = "1.10.1";
version = "1.11.0";
src = fetchFromGitHub {
owner = "nextdns";
repo = "nextdns";
rev = "v${version}";
sha256 = "sha256-hMI6zq176p7MI4cjMSeQ8T8UvibJW60lzsPmeAOi3ow=";
sha256 = "sha256-gnWFgzfMMnn8O7zDN5LW3cMIz5/wmgEW9fI9aJBEah8=";
};
vendorSha256 = "sha256-kmszMqkDMaL+Z6GcZmQyeRShKKS/VGdn9vabYPW/kCc=";

View file

@ -21,6 +21,7 @@ It performs nonlinear dc and transient analyses, fourier analysis, and ac analys
changelog = "https://git.savannah.gnu.org/cgit/gnucap.git/plain/NEWS?h=v${version}";
license = licenses.gpl3Plus;
platforms = platforms.all;
broken = stdenv.isDarwin; # Relies on LD_LIBRARY_PATH
maintainers = [ maintainers.raboof ];
};
}

View file

@ -104,7 +104,9 @@ stdenv.mkDerivation rec {
tllist
wayland-protocols
pkg-config
] ++ lib.optional stdenv.cc.isClang stdenv.cc.cc.llvm;
] ++ lib.optionals (compilerName == "clang") [
stdenv.cc.cc.libllvm.out
];
buildInputs = [
fontconfig

View file

@ -13,11 +13,11 @@ let
in
stdenv.mkDerivation rec {
pname = "gitkraken";
version = "7.5.5";
version = "7.6.0";
src = fetchzip {
url = "https://release.axocdn.com/linux/GitKraken-v${version}.tar.gz";
sha256 = "0l40ap0ck2ywjarmn7lmpw4qbsdkx717d9kmx67p4qlmbwpimqhg";
sha256 = "11818d8ph9qqisdpkv46afhr79qq128gaz5d0n7b48dx25ih1jb9";
};
dontBuild = true;

View file

@ -438,6 +438,35 @@ rec {
done < graph
'';
/*
Write the set of references of the given path to a file, that is, its immediate dependencies.
This produces the equivalent of `nix-store -q --references`.
*/
writeDirectReferencesToFile = path: runCommand "runtime-references"
{
exportReferencesGraph = ["graph" path];
inherit path;
}
''
touch ./references
while read p; do
read dummy
read nrRefs
if [[ $p == $path ]]; then
for ((i = 0; i < nrRefs; i++)); do
read ref;
echo $ref >>./references
done
else
for ((i = 0; i < nrRefs; i++)); do
read ref;
done
fi
done < graph
sort ./references >$out
'';
/* Print an error message if the file with the specified name and
* hash doesn't exist in the Nix store. This function should only

View file

@ -0,0 +1,20 @@
{ lib, nixosTest, path, writeText, hello, figlet, stdenvNoCC }:
nixosTest {
name = "nixpkgs-trivial-builders";
nodes.machine = { ... }: {
virtualisation.writableStore = true;
# Test runs without network, so we don't substitute and prepare our deps
nix.binaryCaches = lib.mkForce [];
environment.etc."pre-built-paths".source = writeText "pre-built-paths" (
builtins.toJSON [hello figlet stdenvNoCC]
);
};
testScript = ''
machine.succeed("""
cd ${lib.cleanSource path}
./pkgs/build-support/trivial-builders/test.sh 2>/dev/console
""")
'';
}

View file

@ -0,0 +1,57 @@
#!/usr/bin/env bash
# -------------------------------------------------------------------------- #
#
# trivial-builders test
#
# -------------------------------------------------------------------------- #
#
# This file can be run independently (quick):
#
# $ pkgs/build-support/trivial-builders/test.sh
#
# or in the build sandbox with a ~20s VM overhead
#
# $ nix-build -A tests.trivial-builders
#
# -------------------------------------------------------------------------- #
# strict bash
set -euo pipefail
# debug
# set -x
# PS4='+(${BASH_SOURCE}:${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
cd "$(dirname ${BASH_SOURCE[0]})" # nixpkgs root
testDirectReferences() {
expr="$1"
diff -U3 \
<(sort <$(nix-build --no-out-link --expr "with import ../../.. {}; writeDirectReferencesToFile ($expr)")) \
<(nix-store -q --references $(nix-build --no-out-link --expr "with import ../../.. {}; ($expr)") | sort)
}
testDirectReferences 'hello'
testDirectReferences 'figlet'
testDirectReferences 'writeText "hi" "hello"'
testDirectReferences 'writeText "hi" "hello ${hello}"'
testDirectReferences 'writeText "hi" "hello ${hello} ${figlet}"'
testClosure() {
expr="$1"
diff -U3 \
<(sort <$(nix-build --no-out-link --expr "with import ../../.. {}; writeReferencesToFile ($expr)")) \
<(nix-store -q --requisites $(nix-build --no-out-link --expr "with import ../../.. {}; ($expr)") | sort)
}
testClosure 'hello'
testClosure 'figlet'
testClosure 'writeText "hi" "hello"'
testClosure 'writeText "hi" "hello ${hello}"'
testClosure 'writeText "hi" "hello ${hello} ${figlet}"'
echo 'OK!'

View file

@ -1,9 +1,25 @@
{ stdenv, lib, buildPythonApplication, bottle
, click, click-completion, colorama, semantic-version
, lockfile, pyserial, requests
, tabulate, pyelftools, marshmallow
, pytest, tox, jsondiff
, git, spdx-license-list-data
{ stdenv, lib, buildPythonApplication
, ajsonrpc
, bottle
, click
, click-completion
, colorama
, git
, jsondiff
, lockfile
, marshmallow
, pyelftools
, pyserial
, pytest
, requests
, semantic-version
, spdx-license-list-data
, starlette
, tabulate
, tox
, uvicorn
, wsproto
, zeroconf
, version, src
}:
@ -79,9 +95,23 @@ in buildPythonApplication rec {
inherit version src;
propagatedBuildInputs = [
bottle click click-completion colorama git
lockfile pyserial requests semantic-version
tabulate pyelftools marshmallow
ajsonrpc
bottle
click
click-completion
colorama
git
lockfile
marshmallow
pyelftools
pyserial
requests
semantic-version
starlette
tabulate
uvicorn
wsproto
zeroconf
];
HOME = "/tmp";

View file

@ -4,14 +4,14 @@
let
callPackage = newScope self;
version = "5.0.4";
version = "5.1.1";
# pypi tarballs don't contain tests - https://github.com/platformio/platformio-core/issues/1964
src = fetchFromGitHub {
owner = "platformio";
repo = "platformio-core";
rev = "v${version}";
sha256 = "15jnhlhkk9z6cyzxw065r3080dqan951klwf65p152vfzg79wf84";
sha256 = "1m9vq5r4g04n3ckmb3hrrc4ar5v31k6isc76bw4glrn2xb7r8c00";
};
self = {

View file

@ -1,11 +1,15 @@
diff --git a/platformio/package/manifest/schema.py b/platformio/package/manifest/schema.py
index f293ba5a..a818271f 100644
index addc4c5..514b0ad 100644
--- a/platformio/package/manifest/schema.py
+++ b/platformio/package/manifest/schema.py
@@ -252,5 +252,4 @@ class ManifestSchema(BaseSchema):
@@ -253,9 +253,4 @@ class ManifestSchema(BaseSchema):
@staticmethod
@memoized(expire="1h")
def load_spdx_licenses():
- spdx_data_url = "https://dl.bintray.com/platformio/dl-misc/spdx-licenses-3.json"
- version = "3.12"
- spdx_data_url = (
- "https://raw.githubusercontent.com/spdx/license-list-data/"
- "v%s/json/licenses.json" % version
- )
- return json.loads(fetch_remote_content(spdx_data_url))
+ return json.load(open("@SPDX_LICENSE_LIST_DATA@/json/licenses.json"))

View file

@ -23,6 +23,7 @@ let
# rebar3 port compiler plugin is required by buildRebar3
pc = callPackage ./pc { };
rebar3-nix = callPackage ./rebar3-nix { };
fetchHex = callPackage ./fetch-hex.nix { };

View file

@ -0,0 +1,18 @@
{ lib, buildRebar3, fetchFromGitHub }:
buildRebar3 rec {
name = "rebar3_nix";
version = "0.1.0";
src = fetchFromGitHub {
owner = "erlang-nix";
repo = name;
rev = "v${version}";
sha256 = "17w8m4aqqgvhpx3xyc7x2qzsrd3ybzc83ay50zs1gyd1b8csh2wf";
};
meta = {
description = "nix integration for rebar3";
license = lib.licenses.bsd3;
homepage = "https://github.com/erlang-nix/rebar3_nix";
maintainers = with lib.maintainers; [ dlesl gleber ];
};
}

View file

@ -0,0 +1,72 @@
{ autoPatchelfHook, fetchurl, lib, makeWrapper, openssl, stdenv }:
stdenv.mkDerivation rec {
pname = "julia-bin";
version = "1.0.5";
src = {
x86_64-linux = fetchurl {
url = "https://julialang-s3.julialang.org/bin/linux/x64/${lib.versions.majorMinor version}/julia-${version}-linux-x86_64.tar.gz";
sha256 = "00vbszpjmz47nqy19v83xa463ajhzwanjyg5mvcfp9kvfw9xdvcx";
};
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");
# Julia's source files are in different locations for source and binary
# releases. Thus we temporarily create symlinks to allow us to share patches
# with source releases.
prePatch = ''
ln -s share/julia/stdlib/v${lib.versions.majorMinor version} stdlib
ln -s share/julia/test
'';
patches = [
# Source release Nix patch(es) relevant for binary releases as well.
./patches/1.0-bin/0002-nix-Skip-tests-that-require-network-access.patch
];
postPatch = ''
# Revert symlink hack.
rm stdlib test
'';
buildInputs = [ makeWrapper ];
nativeBuildInputs = [ autoPatchelfHook ];
installPhase = ''
runHook preInstall
cp -r . $out
# Setting `LD_LIBRARY_PATH` resolves `Libdl` failures. Not sure why this is
# only necessary on v1.0.x and a cleaner solution is welcome, but after
# staring at `strace` for a few hours this is as clean as I could make it.
wrapProgram $out/bin/julia \
--suffix LD_LIBRARY_PATH : $out/lib
runHook postInstall
'';
# Breaks backtraces, etc.
dontStrip = true;
doInstallCheck = true;
installCheckInputs = [ openssl ];
preInstallCheck = ''
# Some tests require read/write access to $HOME.
export HOME="$TMPDIR"
'';
installCheckPhase = ''
runHook preInstallCheck
# Command lifted from `test/Makefile`.
$out/bin/julia \
--check-bounds=yes \
--startup-file=no \
--depwarn=error \
$out/share/julia/test/runtests.jl
runHook postInstallCheck
'';
meta = {
description = "High-level, high-performance dynamic language for technical computing";
homepage = "https://julialang.org";
# Bundled and linked with various GPL code, although Julia itself is MIT.
license = lib.licenses.gpl2Plus;
maintainers = with lib.maintainers; [ ninjin raskin ];
platforms = [ "x86_64-linux" ];
};
}

View file

@ -0,0 +1,73 @@
{ autoPatchelfHook, fetchurl, lib, stdenv }:
stdenv.mkDerivation rec {
pname = "julia-bin";
version = "1.6.1";
src = {
x86_64-linux = fetchurl {
url = "https://julialang-s3.julialang.org/bin/linux/x64/${lib.versions.majorMinor version}/julia-${version}-linux-x86_64.tar.gz";
sha256 = "01i5sm4vqb0y5qznql571zap19b42775drrcxnzsyhpaqgg8m23w";
};
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");
# Julia's source files are in different locations for source and binary
# releases. Thus we temporarily create a symlink to allow us to share patches
# with source releases.
prePatch = ''
ln -s share/julia/test
'';
patches = [
# Source release Nix patch(es) relevant for binary releases as well.
./patches/1.6-bin/0002-nix-Skip-tempname-test-broken-in-sandbox.patch
./patches/1.6-bin/0003-nix-Skip-chown-tests-broken-in-sandbox.patch
./patches/1.6-bin/0005-nix-Enable-parallel-unit-tests-for-sandbox.patch
];
postPatch = ''
# Revert symlink hack.
rm test
# Julia fails to pick up our Certificate Authority (CA) root certificates, but
# it provides its own, so we can simply disable the test. Patching in the
# dynamic path to ours requires us to rebuild the Julia system image.
substituteInPlace share/julia/stdlib/v${lib.versions.majorMinor version}/NetworkOptions/test/runtests.jl \
--replace '@test ca_roots_path() != bundled_ca_roots()' \
'@test_skip ca_roots_path() != bundled_ca_roots()'
'';
nativeBuildInputs = [ autoPatchelfHook ];
installPhase = ''
runHook preInstall
cp -r . $out
runHook postInstall
'';
# Breaks backtraces, etc.
dontStrip = true;
doInstallCheck = true;
preInstallCheck = ''
# Some tests require read/write access to $HOME.
export HOME="$TMPDIR"
'';
installCheckPhase = ''
runHook preInstallCheck
# Command lifted from `test/Makefile`.
$out/bin/julia \
--check-bounds=yes \
--startup-file=no \
--depwarn=error \
$out/share/julia/test/runtests.jl
runHook postInstallCheck
'';
meta = {
description = "High-level, high-performance dynamic language for technical computing.";
homepage = "https://julialang.org";
# Bundled and linked with various GPL code, although Julia itself is MIT.
license = lib.licenses.gpl2Plus;
maintainers = with lib.maintainers; [ ninjin raskin ];
platforms = [ "x86_64-linux" ];
};
}

View file

@ -0,0 +1,87 @@
From 4954b99efae367da49412edd31a7bd832ec62c69 Mon Sep 17 00:00:00 2001
From: Pontus Stenetorp <pontus@stenetorp.se>
Date: Mon, 15 Mar 2021 05:55:18 +0000
Subject: [PATCH 2/3] nix: Skip tests that require network access
Necessary as the Nix build sandbox does not permit network access.
---
stdlib/Sockets/test/runtests.jl | 40 ++++++++++++++++-----------------
test/file.jl | 4 ++--
2 files changed, 22 insertions(+), 22 deletions(-)
diff --git a/stdlib/Sockets/test/runtests.jl b/stdlib/Sockets/test/runtests.jl
index 6145f87616..9cc7a001e5 100644
--- a/stdlib/Sockets/test/runtests.jl
+++ b/stdlib/Sockets/test/runtests.jl
@@ -151,33 +151,33 @@ defaultport = rand(2000:4000)
end
@testset "getnameinfo on some unroutable IP addresses (RFC 5737)" begin
- @test getnameinfo(ip"192.0.2.1") == "192.0.2.1"
- @test getnameinfo(ip"198.51.100.1") == "198.51.100.1"
- @test getnameinfo(ip"203.0.113.1") == "203.0.113.1"
- @test getnameinfo(ip"0.1.1.1") == "0.1.1.1"
- @test getnameinfo(ip"::ffff:0.1.1.1") == "::ffff:0.1.1.1"
- @test getnameinfo(ip"::ffff:192.0.2.1") == "::ffff:192.0.2.1"
- @test getnameinfo(ip"2001:db8::1") == "2001:db8::1"
+ @test_skip getnameinfo(ip"192.0.2.1") == "192.0.2.1"
+ @test_skip getnameinfo(ip"198.51.100.1") == "198.51.100.1"
+ @test_skip getnameinfo(ip"203.0.113.1") == "203.0.113.1"
+ @test_skip getnameinfo(ip"0.1.1.1") == "0.1.1.1"
+ @test_skip getnameinfo(ip"::ffff:0.1.1.1") == "::ffff:0.1.1.1"
+ @test_skip getnameinfo(ip"::ffff:192.0.2.1") == "::ffff:192.0.2.1"
+ @test_skip getnameinfo(ip"2001:db8::1") == "2001:db8::1"
end
@testset "getnameinfo on some valid IP addresses" begin
@test !isempty(getnameinfo(ip"::")::String)
- @test !isempty(getnameinfo(ip"0.0.0.0")::String)
- @test !isempty(getnameinfo(ip"10.1.0.0")::String)
- @test !isempty(getnameinfo(ip"10.1.0.255")::String)
- @test !isempty(getnameinfo(ip"10.1.255.1")::String)
- @test !isempty(getnameinfo(ip"255.255.255.255")::String)
- @test !isempty(getnameinfo(ip"255.255.255.0")::String)
- @test !isempty(getnameinfo(ip"192.168.0.1")::String)
- @test !isempty(getnameinfo(ip"::1")::String)
+ @test_skip !isempty(getnameinfo(ip"0.0.0.0")::String)
+ @test_skip !isempty(getnameinfo(ip"10.1.0.0")::String)
+ @test_skip !isempty(getnameinfo(ip"10.1.0.255")::String)
+ @test_skip !isempty(getnameinfo(ip"10.1.255.1")::String)
+ @test_skip !isempty(getnameinfo(ip"255.255.255.255")::String)
+ @test_skip !isempty(getnameinfo(ip"255.255.255.0")::String)
+ @test_skip !isempty(getnameinfo(ip"192.168.0.1")::String)
+ @test_skip !isempty(getnameinfo(ip"::1")::String)
end
@testset "getaddrinfo" begin
- let localhost = getnameinfo(ip"127.0.0.1")::String
- @test !isempty(localhost) && localhost != "127.0.0.1"
- @test !isempty(getalladdrinfo(localhost)::Vector{IPAddr})
- @test getaddrinfo(localhost, IPv4)::IPv4 != ip"0.0.0.0"
- @test try
+ let localhost = getnameinfo(ip"::")::String
+ @test_skip !isempty(localhost) && localhost != "127.0.0.1"
+ @test_skip !isempty(getalladdrinfo(localhost)::Vector{IPAddr})
+ @test_skip getaddrinfo(localhost, IPv4)::IPv4 != ip"0.0.0.0"
+ @test_skip try
getaddrinfo(localhost, IPv6)::IPv6 != ip"::"
catch ex
isa(ex, Sockets.DNSError) && ex.code == Base.UV_EAI_NONAME && ex.host == localhost
diff --git a/test/file.jl b/test/file.jl
index e86476f975..579276f82c 100644
--- a/test/file.jl
+++ b/test/file.jl
@@ -874,8 +874,8 @@ if !Sys.iswindows() || (Sys.windows_version() >= Sys.WINDOWS_VISTA_VER)
else
@test_throws ErrorException symlink(file, "ba\0d")
end
-@test_throws ArgumentError download("good", "ba\0d")
-@test_throws ArgumentError download("ba\0d", "good")
+@test_skip @test_throws ArgumentError download("good", "ba\0d")
+@test_skip @test_throws ArgumentError download("ba\0d", "good")
###################
# walkdir #
--
2.29.3

View file

@ -0,0 +1,28 @@
From ffe227676352a910754d96d92e9b06e475f28ff1 Mon Sep 17 00:00:00 2001
From: Pontus Stenetorp <pontus@stenetorp.se>
Date: Thu, 8 Apr 2021 04:25:19 +0000
Subject: [PATCH 2/6] nix: Skip `tempname` test broken in sandbox
Reported upstream:
https://github.com/JuliaLang/julia/issues/38873
---
test/file.jl | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/test/file.jl b/test/file.jl
index 0f39bc7c14..bd4dd78f62 100644
--- a/test/file.jl
+++ b/test/file.jl
@@ -95,7 +95,7 @@ end
@test dirname(t) == tempdir()
mktempdir() do d
t = tempname(d)
- @test dirname(t) == d
+ @test_skip dirname(t) == d
end
@test_throws ArgumentError tempname(randstring())
end
--
2.29.3

View file

@ -0,0 +1,27 @@
From b20357fb1044d2c100172b1d5cbdf6c6d9bd3590 Mon Sep 17 00:00:00 2001
From: Pontus Stenetorp <pontus@stenetorp.se>
Date: Thu, 8 Apr 2021 05:10:39 +0000
Subject: [PATCH 3/6] nix: Skip `chown` tests broken in sandbox
---
test/file.jl | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/test/file.jl b/test/file.jl
index bd4dd78f62..06fd4e49da 100644
--- a/test/file.jl
+++ b/test/file.jl
@@ -503,8 +503,8 @@ if !Sys.iswindows()
@test stat(file).gid == 0
@test stat(file).uid == 0
else
- @test_throws Base._UVError("chown($(repr(file)), -2, -1)", Base.UV_EPERM) chown(file, -2, -1) # Non-root user cannot change ownership to another user
- @test_throws Base._UVError("chown($(repr(file)), -1, -2)", Base.UV_EPERM) chown(file, -1, -2) # Non-root user cannot change group to a group they are not a member of (eg: nogroup)
+ @test_skip @test_throws Base._UVError("chown($(repr(file)), -2, -1)", Base.UV_EPERM) chown(file, -2, -1) # Non-root user cannot change ownership to another user
+ @test_skip @test_throws Base._UVError("chown($(repr(file)), -1, -2)", Base.UV_EPERM) chown(file, -1, -2) # Non-root user cannot change group to a group they are not a member of (eg: nogroup)
end
else
# test that chown doesn't cause any errors for Windows
--
2.29.3

View file

@ -0,0 +1,30 @@
From 44c2c979c4f2222567ce65f506cf47fb87482348 Mon Sep 17 00:00:00 2001
From: Pontus Stenetorp <pontus@stenetorp.se>
Date: Thu, 8 Apr 2021 04:37:44 +0000
Subject: [PATCH 5/6] nix: Enable parallel unit tests for sandbox
Disabled by default due to lack of networking in the Nix sandbox. This
greatly speeds up the build process on a multi-core system.
---
test/runtests.jl | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/test/runtests.jl b/test/runtests.jl
index 2f9cd058bb..2f8c19fa32 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -83,8 +83,9 @@ prepend!(tests, linalg_tests)
import LinearAlgebra
cd(@__DIR__) do
n = 1
- if net_on
- n = min(Sys.CPU_THREADS, length(tests))
+ if net_on || haskey(ENV, "NIX_BUILD_CORES")
+ x = haskey(ENV, "NIX_BUILD_CORES") ? parse(Int, ENV["NIX_BUILD_CORES"]) : Sys.CPU_THREADS
+ n = min(x, Sys.CPU_THREADS, length(tests))
n > 1 && addprocs_with_testenv(n)
LinearAlgebra.BLAS.set_num_threads(1)
end
--
2.29.3

View file

@ -0,0 +1,46 @@
{ lib, mkCoqDerivation, coq, version ? null }:
with lib;
mkCoqDerivation {
pname = "aac-tactics";
releaseRev = v: "v${v}";
release."8.13.0".sha256 = "sha256-MAnMc4KzC551JInrRcfKED4nz04FO0GyyyuDVRmnYTY=";
release."8.12.0".sha256 = "sha256-dPNA19kZo/2t3rbyX/R5yfGcaEfMhbm9bo71Uo4ZwoM=";
release."8.11.0".sha256 = "sha256-CKKMiJLltIb38u+ZKwfQh/NlxYawkafp+okY34cGCYU=";
release."8.10.0".sha256 = "sha256-Ny3AgfLAzrz3FnoUqejXLApW+krlkHBmYlo3gAG0JsM=";
release."8.9.0".sha256 = "sha256-6Pp0dgYEnVaSnkJR/2Cawt5qaxWDpBI4m0WAbQboeWY=";
release."8.8.0".sha256 = "sha256-mwIKp3kf/6i9IN3cyIWjoRtW8Yf8cc3MV744zzFM3u4=";
release."8.6.1".sha256 = "sha256-PfovQ9xJnzr0eh/tO66yJ3Yp7A5E1SQG46jLIrrbZFg=";
release."8.5.0".sha256 = "sha256-7yNxJn6CH5xS5w/zsXfcZYORa6e5/qS9v8PUq2o02h4=";
inherit version;
defaultVersion = with versions; switch coq.coq-version [
{ case = "8.13"; out = "8.13.0"; }
{ case = "8.12"; out = "8.12.0"; }
{ case = "8.11"; out = "8.11.0"; }
{ case = "8.10"; out = "8.10.0"; }
{ case = "8.9"; out = "8.9.0"; }
{ case = "8.8"; out = "8.8.0"; }
{ case = "8.6"; out = "8.6.1"; }
{ case = "8.5"; out = "8.5.0"; }
] null;
mlPlugin = true;
meta = {
description = "Coq plugin providing tactics for rewriting universally quantified equations";
longDescription = ''
This Coq plugin provides tactics for rewriting universally quantified
equations, modulo associativity and commutativity of some operator.
The tactics can be applied for custom operators by registering the
operators and their properties as type class instances. Many common
operator instances, such as for Z binary arithmetic and booleans, are
provided with the plugin.
'';
maintainers = with maintainers; [ siraben ];
license = licenses.gpl3Plus;
platforms = platforms.unix;
};
}

View file

@ -0,0 +1,35 @@
{ lib, mkCoqDerivation, coq, aac-tactics, mathcomp, version ? null }:
with lib;
mkCoqDerivation {
pname = "relation-algebra";
owner = "damien-pous";
releaseRev = v: "v${v}";
release."1.7.5".sha256 = "sha256-XdO8agoJmNXPv8Ho+KTlLCB4oRlQsb0w06aM9M16ZBU=";
release."1.7.4".sha256 = "sha256-o+v2CIAa2+9tJ/V8DneDTf4k31KMHycgMBLaQ+A4ufM=";
release."1.7.3".sha256 = "sha256-4feSNfi7h4Yhwn5L+9KP9K1S7HCPvsvaVWwoQSTFvos=";
release."1.7.2".sha256 = "sha256-f4oNjXspNMEz3AvhIeYO3avbUa1AThoC9DbcHMb5A2o=";
release."1.7.1".sha256 = "sha256-WWVMcR6z8rT4wzZPb8SlaVWGe7NC8gScPqawd7bltQA=";
inherit version;
defaultVersion = with versions; switch coq.coq-version [
{ case = isGe "8.13"; out = "1.7.5"; }
{ case = isGe "8.12"; out = "1.7.4"; }
{ case = isGe "8.11"; out = "1.7.3"; }
{ case = isGe "8.10"; out = "1.7.2"; }
{ case = isGe "8.9"; out = "1.7.1"; }
] null;
mlPlugin = true;
propagatedBuildInputs = [ aac-tactics mathcomp.ssreflect ];
meta = {
description = "Relation algebra library for Coq";
maintainers = with maintainers; [ siraben ];
license = licenses.gpl3Plus;
platforms = platforms.unix;
};
}

View file

@ -3,13 +3,13 @@
stdenv.mkDerivation rec
{
pname = "alembic";
version = "1.8.0";
version = "1.8.1";
src = fetchFromGitHub {
owner = "alembic";
repo = "alembic";
rev = version;
sha256 = "sha256-c4SN3kNY8415+O/2AYuHNQFEmuTBtLaWj5fsj0yJ2vs=";
sha256 = "sha256-ObjpWreabeVzKYVgC62JaoGUf1BZCxP0STjox3akDvo=";
};
outputs = [ "bin" "dev" "out" "lib" ];

View file

@ -12,13 +12,12 @@
, libical
, python3
, tzdata
, fixDarwinDylibNames
, introspectionSupport ? stdenv.buildPlatform == stdenv.hostPlatform
, gobject-introspection ? null
, vala ? null
, gobject-introspection
, vala
}:
assert introspectionSupport -> gobject-introspection != null && vala != null;
stdenv.mkDerivation rec {
pname = "libical";
version = "3.0.10";
@ -47,6 +46,8 @@ stdenv.mkDerivation rec {
] ++ lib.optionals introspectionSupport [
gobject-introspection
vala
] ++ lib.optionals stdenv.isDarwin [
fixDarwinDylibNames
];
installCheckInputs = [
# running libical-glib tests
@ -80,6 +81,13 @@ stdenv.mkDerivation rec {
# LD_LIBRARY_PATH and GI_TYPELIB_PATH variables
doInstallCheck = true;
enableParallelChecking = false;
preInstallCheck = if stdenv.isDarwin then ''
for testexe in $(find ./src/test -maxdepth 1 -type f -executable); do
for lib in $(cd lib && ls *.3.dylib); do
install_name_tool -change $lib $out/lib/$lib $testexe
done
done
'' else null;
installCheckPhase = ''
runHook preInstallCheck

View file

@ -1,5 +1,5 @@
{ stdenv, lib, fetchgit, fetchFromGitHub
, gn, ninja, python, glib, pkg-config, icu
, gn, ninja, python, pythonPackages, glib, pkg-config, icu
, xcbuild, darwin
, fetchpatch
}:
@ -55,6 +55,11 @@ stdenv.mkDerivation rec {
doCheck = true;
patches = [
# Remove unrecognized clang debug flags
(fetchpatch {
url = "https://raw.githubusercontent.com/saiarcot895/chromium-ubuntu-build/663dbfc492fd2f8ba28d9af40fb3b1327e6aa56e/debian/patches/revert-Xclang-instcombine-lower-dbg-declare.patch";
sha256 = "07qp4bjgbwbdrzqslvl2bgbzr3v97b9isbp0539x3lc8cy3h02g1";
})
./darwin.patch
./gcc_arm.patch # Fix building zlib with gcc on aarch64, from https://gist.github.com/Adenilson/d973b6fd96c7709d33ddf08cf1dcb149
];
@ -78,6 +83,13 @@ stdenv.mkDerivation rec {
postPatch = lib.optionalString stdenv.isAarch64 ''
substituteInPlace build/toolchain/linux/BUILD.gn \
--replace 'toolprefix = "aarch64-linux-gnu-"' 'toolprefix = ""'
'' + lib.optionalString stdenv.isDarwin ''
substituteInPlace build/config/compiler/compiler.gni \
--replace 'strip_absolute_paths_from_debug_symbols = true' \
'strip_absolute_paths_from_debug_symbols = false'
substituteInPlace build/config/compiler/BUILD.gn \
--replace 'current_toolchain == host_toolchain || !use_xcode_clang' \
'false'
'';
gnFlags = [
@ -85,6 +97,7 @@ stdenv.mkDerivation rec {
"is_clang=${lib.boolToString stdenv.cc.isClang}"
"use_sysroot=false"
# "use_system_icu=true"
"clang_use_chrome_plugins=false"
"is_component_build=false"
"v8_use_external_startup_data=false"
"v8_monolithic=true"
@ -93,16 +106,25 @@ stdenv.mkDerivation rec {
"treat_warnings_as_errors=false"
"v8_enable_i18n_support=true"
"use_gold=false"
"use_system_xcode=true"
"init_stack_vars=false"
# ''custom_toolchain="//build/toolchain/linux/unbundle:default"''
''host_toolchain="//build/toolchain/linux/unbundle:default"''
''v8_snapshot_toolchain="//build/toolchain/linux/unbundle:default"''
] ++ lib.optional stdenv.cc.isClang ''clang_base_path="${stdenv.cc}"'';
NIX_CFLAGS_COMPILE = "-O2";
FORCE_MAC_SDK_MIN = stdenv.targetPlatform.sdkVer or "10.12";
nativeBuildInputs = [ gn ninja pkg-config python ]
++ lib.optionals stdenv.isDarwin [ xcbuild darwin.DarwinTools ];
nativeBuildInputs = [
gn
ninja
pkg-config
python
] ++ lib.optionals stdenv.isDarwin [
xcbuild
darwin.DarwinTools
pythonPackages.setuptools
];
buildInputs = [ glib icu ];
ninjaFlags = [ ":d8" "v8_monolith" ];

View file

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchPypi
, pythonOlder
, pyyaml
, prance
, marshmallow
@ -11,16 +12,20 @@
buildPythonPackage rec {
pname = "apispec";
version = "4.3.0";
version = "4.4.1";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
sha256 = "5ec0fe72f1422a1198973fcbb48d0eb5c7390f4b0fbe55474fce999ad6826a9b";
sha256 = "sha256-qt7UrkUXUsWLcOV5kj2Nt9rwtx9i3vjI/noqUr18BqI=";
};
checkInputs = [
propagatedBuildInputs = [
pyyaml
prance
];
checkInputs = [
openapi-spec-validator
marshmallow
mock

View file

@ -8,7 +8,7 @@
, numpy
, pillow
, scipy
, scikitlearn
, scikit-learn
, scikitimage
, threadpoolctl
}:
@ -28,7 +28,7 @@ buildPythonPackage rec {
};
propagatedBuildInputs = [
future numpy pillow scipy scikitlearn scikitimage threadpoolctl
future numpy pillow scipy scikit-learn scikitimage threadpoolctl
];
checkInputs = [ pytestCheckHook unittest2 ];

View file

@ -2,7 +2,7 @@
, buildPythonPackage
, fetchFromGitHub
, python
, scikitlearn
, scikit-learn
, scipy
, pytest
, isPy27
@ -21,7 +21,7 @@ buildPythonPackage rec {
};
propagatedBuildInputs = [
scikitlearn
scikit-learn
scipy
];

View file

@ -6,7 +6,7 @@
, biopython
, numpy
, scipy
, scikitlearn
, scikit-learn
, pandas
, matplotlib
, reportlab
@ -42,7 +42,7 @@ buildPythonPackage rec {
biopython
numpy
scipy
scikitlearn
scikit-learn
pandas
matplotlib
reportlab

View file

@ -7,7 +7,7 @@
, multipledispatch
, setuptools-scm
, scipy
, scikitlearn
, scikit-learn
, pytestCheckHook
}:
@ -22,7 +22,7 @@ buildPythonPackage rec {
nativeBuildInputs = [ setuptools-scm ];
checkInputs = [ pytestCheckHook ];
propagatedBuildInputs = [ cloudpickle dask numpy toolz multipledispatch scipy scikitlearn ];
propagatedBuildInputs = [ cloudpickle dask numpy toolz multipledispatch scipy scikit-learn ];
meta = with lib; {
homepage = "https://github.com/dask/dask-glm/";

View file

@ -6,7 +6,7 @@
, numpy, toolz # dask[array]
, numba
, pandas
, scikitlearn
, scikit-learn
, scipy
, dask-glm
, six
@ -39,7 +39,7 @@ buildPythonPackage rec {
numpy
packaging
pandas
scikitlearn
scikit-learn
scipy
six
toolz

View file

@ -8,7 +8,7 @@
, pandas
, scipy
, numpy
, scikitlearn
, scikit-learn
, lammps-cython
, pymatgen-lammps
, pytestrunner
@ -34,7 +34,7 @@ buildPythonPackage rec {
pandas
scipy
numpy
scikitlearn
scikit-learn
lammps-cython
pymatgen-lammps
];

View file

@ -5,7 +5,7 @@
, six
, scipy
, smart_open
, scikitlearn, testfixtures, unittest2
, scikit-learn, testfixtures, unittest2
, isPy3k
}:
@ -21,7 +21,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [ smart_open numpy six scipy ];
checkInputs = [ scikitlearn testfixtures unittest2 ];
checkInputs = [ scikit-learn testfixtures unittest2 ];
# Two tests fail.
#

View file

@ -8,7 +8,7 @@
, matplotlib
, networkx
, numpy
, scikitlearn
, scikit-learn
, scipy
, seaborn
}:
@ -31,7 +31,7 @@ buildPythonPackage rec {
matplotlib
networkx
numpy
scikitlearn
scikit-learn
scipy
seaborn
];

View file

@ -5,7 +5,7 @@
, numpy
, pytestCheckHook
, scipy
, scikitlearn
, scikit-learn
, fetchPypi
, joblib
, six
@ -29,7 +29,7 @@ buildPythonPackage rec {
];
nativeBuildInputs = [ cython ];
propagatedBuildInputs = [ numpy scipy scikitlearn joblib six ];
propagatedBuildInputs = [ numpy scipy scikit-learn joblib six ];
preCheck = ''
cd hdbscan/tests
rm __init__.py

View file

@ -1,4 +1,4 @@
{ lib, fetchurl, buildPythonPackage, numpy, scikitlearn, setuptools_scm, cython, pytest }:
{ lib, fetchurl, buildPythonPackage, numpy, scikit-learn, setuptools_scm, cython, pytest }:
buildPythonPackage rec {
pname = "hmmlearn";
@ -10,7 +10,7 @@ buildPythonPackage rec {
};
buildInputs = [ setuptools_scm cython ];
propagatedBuildInputs = [ numpy scikitlearn ];
propagatedBuildInputs = [ numpy scikit-learn ];
checkInputs = [ pytest ];
checkPhase = ''

View file

@ -4,7 +4,7 @@
, fetchFromGitHub
, pytestCheckHook , pytestcov , numba
, numpy
, scikitlearn
, scikit-learn
, scipy
, matplotlib
, seaborn
@ -26,7 +26,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numba
numpy
scikitlearn
scikit-learn
scipy
];

View file

@ -8,7 +8,7 @@
, mock
, pytorch
, pynvml
, scikitlearn
, scikit-learn
, tqdm
}:
@ -24,7 +24,7 @@ buildPythonPackage rec {
};
checkInputs = [ pytestCheckHook matplotlib mock pytest_xdist ];
propagatedBuildInputs = [ pytorch scikitlearn tqdm pynvml ];
propagatedBuildInputs = [ pytorch scikit-learn tqdm pynvml ];
# Runs successfully in 3.9; however, async isn't correctly closed, so it will fail after the test suite.
doCheck = pythonOlder "3.9";

View file

@ -1,4 +1,4 @@
{ lib, buildPythonPackage, fetchPypi, scikitlearn, pandas, nose, pytest }:
{ lib, buildPythonPackage, fetchPypi, scikit-learn, pandas, nose, pytest }:
buildPythonPackage rec {
pname = "imbalanced-learn";
@ -9,7 +9,7 @@ buildPythonPackage rec {
sha256 = "5bd9e86e40ce4001a57426541d7c79b18143cbd181e3330c1a3e5c5c43287083";
};
propagatedBuildInputs = [ scikitlearn ];
propagatedBuildInputs = [ scikit-learn ];
checkInputs = [ nose pytest pandas ];
checkPhase = ''
export HOME=$PWD

View file

@ -4,7 +4,7 @@
, isPy27
, pandas
, pytestCheckHook
, scikitlearn
, scikit-learn
}:
buildPythonPackage rec {
@ -17,7 +17,7 @@ buildPythonPackage rec {
sha256 = "0a9xrw4qsh95g85pg2611hvj6xcfncw646si2icaz22haw1x410w";
};
propagatedBuildInputs = [ scikitlearn ];
propagatedBuildInputs = [ scikit-learn ];
checkInputs = [ pytestCheckHook pandas ];
preCheck = ''
export HOME=$TMPDIR

View file

@ -1,7 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, scikitlearn
, scikit-learn
, numpy
, scipy
, jinja2
@ -25,7 +25,7 @@ buildPythonPackage rec {
};
propagatedBuildInputs = [
scikitlearn
scikit-learn
numpy
scipy
jinja2

View file

@ -4,7 +4,7 @@
, joblib
, matplotlib
, six
, scikitlearn
, scikit-learn
, decorator
, audioread
, resampy
@ -21,7 +21,7 @@ buildPythonPackage rec {
sha256 = "af0b9f2ed4bbf6aecbc448a4cd27c16453c397cb6bef0f0cfba0e63afea2b839";
};
propagatedBuildInputs = [ joblib matplotlib six scikitlearn decorator audioread resampy soundfile pooch ];
propagatedBuildInputs = [ joblib matplotlib six scikit-learn decorator audioread resampy soundfile pooch ];
# No tests
# 1. Internet connection is required

View file

@ -4,7 +4,7 @@
, cmake
, numpy
, scipy
, scikitlearn
, scikit-learn
, llvmPackages ? null
}:
@ -39,7 +39,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
scipy
scikitlearn
scikit-learn
];
postConfigure = ''

View file

@ -3,7 +3,7 @@
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, scikitlearn
, scikit-learn
, pytestCheckHook
, pytest-randomly
}:
@ -28,7 +28,7 @@ buildPythonPackage rec {
})
];
propagatedBuildInputs = [ scikitlearn ];
propagatedBuildInputs = [ scikit-learn ];
checkInputs = [ pytest-randomly pytestCheckHook ];
postPatch = ''

View file

@ -5,7 +5,7 @@
, pytestCheckHook
, scipy
, numpy
, scikitlearn
, scikit-learn
, pandas
, matplotlib
, joblib
@ -33,7 +33,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
scipy
numpy
scikitlearn
scikit-learn
pandas
matplotlib
joblib
@ -45,7 +45,7 @@ buildPythonPackage rec {
license= licenses.bsd3;
maintainers = with maintainers; [ evax ];
platforms = platforms.unix;
# incompatible with nixpkgs scikitlearn version
# incompatible with nixpkgs scikit-learn version
broken = true;
};
}

View file

@ -11,7 +11,7 @@
, matplotlib
, nibabel
, pandas
, scikitlearn
, scikit-learn
}:
buildPythonPackage rec {
@ -40,7 +40,7 @@ buildPythonPackage rec {
matplotlib
nibabel
pandas
scikitlearn
scikit-learn
];
preCheck = ''
export HOME=$TMP

View file

@ -13,14 +13,14 @@
, advancedProcessing ? false
, opencv3 ? null
, scikitimage ? null
, scikitlearn ? null
, scikit-learn ? null
, scipy ? null
, matplotlib ? null
, youtube-dl ? null
}:
assert advancedProcessing -> (
opencv3 != null && scikitimage != null && scikitlearn != null
opencv3 != null && scikitimage != null && scikit-learn != null
&& scipy != null && matplotlib != null && youtube-dl != null);
buildPythonPackage rec {
@ -40,7 +40,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy decorator imageio imageio-ffmpeg tqdm requests proglog
] ++ (lib.optionals advancedProcessing [
opencv3 scikitimage scikitlearn scipy matplotlib youtube-dl
opencv3 scikitimage scikit-learn scipy matplotlib youtube-dl
]);
meta = with lib; {

View file

@ -1,5 +1,5 @@
{ lib, buildPythonPackage, fetchPypi, pytestCheckHook, matplotlib
, nibabel, numpy, pandas, scikitlearn, scipy, joblib, requests }:
, nibabel, numpy, pandas, scikit-learn, scipy, joblib, requests }:
buildPythonPackage rec {
pname = "nilearn";
@ -20,7 +20,7 @@ buildPythonPackage rec {
numpy
pandas
requests
scikitlearn
scikit-learn
scipy
];

View file

@ -1,5 +1,5 @@
{ lib, buildPythonPackage, fetchFromGitHub, geopandas, descartes, matplotlib, networkx, numpy
, pandas, requests, Rtree, shapely, folium, scikitlearn, scipy}:
, pandas, requests, Rtree, shapely, folium, scikit-learn, scipy}:
buildPythonPackage rec {
pname = "osmnx";
@ -12,7 +12,7 @@ buildPythonPackage rec {
sha256 = "1k3y5kl4k93vxaxyanc040x44s2fyyc3m1ndy2j3kg0037z8ad4z";
};
propagatedBuildInputs = [ geopandas descartes matplotlib networkx numpy pandas requests Rtree shapely folium scikitlearn scipy ];
propagatedBuildInputs = [ geopandas descartes matplotlib networkx numpy pandas requests Rtree shapely folium scikit-learn scipy ];
# requires network
doCheck = false;

View file

@ -6,7 +6,7 @@
, joblib
, matplotlib
, numpy
, scikitlearn
, scikit-learn
, scipy
, pytestCheckHook
}:
@ -26,7 +26,7 @@ buildPythonPackage rec {
joblib
matplotlib
numpy
scikitlearn
scikit-learn
scipy
];

View file

@ -3,7 +3,7 @@
, fetchFromGitHub
, isPy27
, pandas
, scikitlearn
, scikit-learn
, pytestCheckHook
}:
@ -23,7 +23,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
pandas
scikitlearn
scikit-learn
];
meta = with lib; {

View file

@ -4,7 +4,7 @@
, scipy
, numpy
, numba
, scikitlearn
, scikit-learn
, pytest
, pythonOlder
}:
@ -25,7 +25,7 @@ buildPythonPackage rec {
scipy
numpy
numba
scikitlearn
scikit-learn
];
checkInputs = [

View file

@ -2,7 +2,7 @@
, buildPythonPackage
, fetchPypi
, nose
, scikitlearn
, scikit-learn
, scipy
, numba
, llvmlite
@ -19,7 +19,7 @@ buildPythonPackage rec {
};
propagatedBuildInputs = [
scikitlearn
scikit-learn
scipy
numba
llvmlite

View file

@ -3,7 +3,7 @@
, fetchFromGitHub
, isPy27
, numpy
, scikitlearn
, scikit-learn
, pytestCheckHook
, pytorch
, torchvision
@ -26,7 +26,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
pytorch
scikitlearn
scikit-learn
torchvision
tqdm
];

View file

@ -14,7 +14,7 @@
, qiskit-ignis
, qiskit-terra
, quandl
, scikitlearn
, scikit-learn
, yfinance
# Optional inputs
, withTorch ? false
@ -59,7 +59,7 @@ buildPythonPackage rec {
qiskit-terra
qiskit-ignis
quandl
scikitlearn
scikit-learn
yfinance
] ++ lib.optionals (withTorch) [ pytorch ]
++ lib.optionals (withPyscf) [ pyscf ]

View file

@ -5,7 +5,7 @@
, python
, numpy
, qiskit-terra
, scikitlearn
, scikit-learn
, scipy
# Optional package inputs
, withVisualization ? false
@ -44,7 +44,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
qiskit-terra
scikitlearn
scikit-learn
scipy
] ++ lib.optionals (withCvx) [ cvxpy ]
++ lib.optionals (withVisualization) [ matplotlib ]

View file

@ -5,7 +5,7 @@
, cython
, numpy
, scipy
, scikitlearn
, scikit-learn
, persim
, pytest
}:
@ -28,7 +28,7 @@ buildPythonPackage rec {
cython
numpy
scipy
scikitlearn
scikit-learn
persim
];

View file

@ -12,7 +12,7 @@
, pandas
, scipy
, hdmedians
, scikitlearn
, scikit-learn
, coverage
, python
, isPy3k
@ -30,7 +30,7 @@ buildPythonPackage rec {
buildInputs = [ cython ];
checkInputs = [ coverage ];
propagatedBuildInputs = [ lockfile cachecontrol decorator ipython matplotlib natsort numpy pandas scipy hdmedians scikitlearn ];
propagatedBuildInputs = [ lockfile cachecontrol decorator ipython matplotlib natsort numpy pandas scipy hdmedians scikit-learn ];
# cython package not included for tests
doCheck = false;

View file

@ -7,7 +7,8 @@
, glibcLocales
, numpy
, scipy
, pytest
, pytestCheckHook
, pytest-xdist
, pillow
, cython
, joblib
@ -54,17 +55,30 @@ buildPythonPackage rec {
joblib
threadpoolctl
];
checkInputs = [ pytest ];
checkInputs = [ pytestCheckHook pytest-xdist ];
LC_ALL="en_US.UTF-8";
doCheck = !stdenv.isAarch64;
# Skip test_feature_importance_regression - does web fetch
checkPhase = ''
cd $TMPDIR
HOME=$TMPDIR OMP_NUM_THREADS=1 pytest -k "not test_feature_importance_regression" --pyargs sklearn
preBuild = ''
export SKLEARN_BUILD_PARALLEL=$NIX_BUILD_CORES
'';
doCheck = !stdenv.isAarch64;
# Skip test_feature_importance_regression - does web fetch
disabledTests = [ "test_feature_importance_regression" ];
pytestFlagsArray = [ "-n" "$NIX_BUILD_CORES" "--pyargs" "sklearn" ];
preCheck = ''
cd $TMPDIR
export HOME=$TMPDIR
export OMP_NUM_THREADS=1
'';
pythonImportsCheck = [ "sklearn" ];
meta = with lib; {
description = "A set of python modules for machine learning and data mining";
changelog = let

View file

@ -5,7 +5,7 @@
, matplotlib
, numpy
, scipy
, scikitlearn
, scikit-learn
, pyaml
, pytestCheckHook
}:
@ -26,7 +26,7 @@ buildPythonPackage rec {
matplotlib
numpy
scipy
scikitlearn
scikit-learn
pyaml
];

View file

@ -3,7 +3,7 @@
, fetchFromGitHub
, numpy
, scipy
, scikitlearn
, scikit-learn
, matplotlib
, numba
, umap-learn
@ -32,7 +32,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
scipy
scikitlearn
scikit-learn
matplotlib
numba
umap-learn

View file

@ -2,7 +2,7 @@
, buildPythonPackage
, fetchFromGitHub
, numpy
, scikitlearn
, scikit-learn
, perl
, pytestCheckHook
}:
@ -27,7 +27,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
scikitlearn
scikit-learn
];
checkInputs = [

View file

@ -5,7 +5,7 @@
, pytestCheckHook
, numpy
, scipy
, scikitlearn
, scikit-learn
, pandas
, tqdm
, slicer
@ -30,7 +30,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
scipy
scikitlearn
scikit-learn
pandas
tqdm
slicer

View file

@ -1,4 +1,4 @@
{ lib, buildPythonPackage, fetchFromGitHub, fetchpatch, numpy, scipy, deap, scikitlearn, python }:
{ lib, buildPythonPackage, fetchFromGitHub, fetchpatch, numpy, scipy, deap, scikit-learn, python }:
buildPythonPackage rec {
pname = "sklearn-deap";
@ -20,7 +20,7 @@ buildPythonPackage rec {
})
];
propagatedBuildInputs = [ numpy scipy deap scikitlearn ];
propagatedBuildInputs = [ numpy scipy deap scikit-learn ];
checkPhase = ''
${python.interpreter} test.py

View file

@ -8,7 +8,7 @@
, numpy
, pandas
, pytorch
, scikitlearn
, scikit-learn
, scipy
, tabulate
, tqdm
@ -23,7 +23,7 @@ buildPythonPackage rec {
sha256 = "9910f97339e654c8d38e0075d87b735e69e5eb11db59c527fb36705b30c8d0a4";
};
propagatedBuildInputs = [ numpy pytorch scikitlearn scipy tabulate tqdm ];
propagatedBuildInputs = [ numpy pytorch scikit-learn scipy tabulate tqdm ];
checkInputs = [ pytest pytestcov flaky pandas pytestCheckHook ];
disabledTests = [

View file

@ -9,7 +9,7 @@
, pyphen
, pytest
, requests
, scikitlearn
, scikit-learn
, scipy
, spacy
, srsly
@ -35,7 +35,7 @@ buildPythonPackage rec {
pyemd
pyphen
requests
scikitlearn
scikit-learn
scipy
spacy
srsly

View file

@ -3,7 +3,7 @@
, fetchFromGitHub
, nose
, numpy
, scikitlearn
, scikit-learn
, scipy
, numba
, pynndescent
@ -24,7 +24,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
scikitlearn
scikit-learn
scipy
numba
pynndescent

View file

@ -11,7 +11,7 @@
, pygments
, numpy
, scipy
, scikitlearn }:
, scikit-learn }:
buildPythonPackage rec {
pname = "vowpalwabbit";
@ -36,7 +36,7 @@ buildPythonPackage rec {
propagatedBuildInputs = [
numpy
scikitlearn
scikit-learn
scipy
];

View file

@ -4,7 +4,7 @@
, fetchzip
, cython
, numpy
, scikitlearn
, scikit-learn
, six
, setuptools_scm
, gcc
@ -30,7 +30,7 @@ buildPythonPackage rec {
nativeBuildInputs = [ setuptools_scm gcc ];
propagatedBuildInputs = [ cython numpy scikitlearn six ];
propagatedBuildInputs = [ cython numpy scikit-learn six ];
checkInputs = [ pytest pytestcov ];

View file

@ -2,7 +2,7 @@
, pytestCheckHook
, cmake
, scipy
, scikitlearn
, scikit-learn
, stdenv
, xgboost
, substituteAll
@ -22,7 +22,7 @@ buildPythonPackage {
propagatedBuildInputs = [ scipy ];
checkInputs = [
pytestCheckHook
scikitlearn
scikit-learn
pandas
matplotlib
graphviz

View file

@ -1,10 +1,11 @@
{ lib, stdenv, fetchFromGitHub,
fetchHex, erlang, makeWrapper }:
fetchHex, erlang, makeWrapper,
writeScript, common-updater-scripts, coreutils, git, gnused, nix, rebar3-nix }:
let
version = "3.15.1";
owner = "erlang";
deps = import ./rebar-deps.nix { inherit fetchHex; };
deps = import ./rebar-deps.nix { inherit fetchFromGitHub fetchHex; };
rebar3 = stdenv.mkDerivation rec {
pname = "rebar3";
inherit version erlang;
@ -63,6 +64,31 @@ let
license = lib.licenses.asl20;
};
passthru.updateScript = writeScript "update.sh" ''
#!${stdenv.shell}
set -ox errexit
PATH=${
lib.makeBinPath [
common-updater-scripts
coreutils
git
gnused
nix
(rebar3WithPlugins { globalPlugins = [rebar3-nix]; })
]
}
latest=$(list-git-tags https://github.com/${owner}/${pname}.git | sed -n '/[\d\.]\+/p' | sort -V | tail -1)
if [ "$latest" != "${version}" ]; then
nixpkgs="$(git rev-parse --show-toplevel)"
nix_path="$nixpkgs/pkgs/development/tools/build-managers/rebar3"
update-source-version rebar3 "$latest" --version-key=version --print-changes --file="$nix_path/default.nix"
tmpdir=$(mktemp -d)
cp -R $(nix-build $nixpkgs --no-out-link -A rebar3.src)/* "$tmpdir"
(cd "$tmpdir" && rebar3 nix lock -o "$nix_path/rebar-deps.nix")
else
echo "rebar3 is already up-to-date"
fi
'';
};
rebar3WithPlugins = { plugins ? [ ], globalPlugins ? [ ] }:
let

View file

@ -1,4 +1,5 @@
{ fetchHex }:
# Generated by rebar3_nix
{ fetchHex, fetchFromGitHub }:
{
ssl_verify_fun = fetchHex {
pkg = "ssl_verify_fun";

View file

@ -2,16 +2,16 @@
rustPlatform.buildRustPackage rec {
pname = "cargo-cache";
version = "0.6.1";
version = "0.6.2";
src = fetchFromGitHub {
owner = "matthiaskrgr";
repo = pname;
rev = version;
sha256 = "sha256-qRwyNSAYuAnU17o/5zqKuvixQw7xfA6wNVzN6QRbZlY=";
sha256 = "sha256-/xP6TQcLyY1XC8r5SCkwej/I6fMaV5PqNNuvK1WbmeM=";
};
cargoSha256 = "sha256-qAq5B/BivQr8yuHtyFGTRigAa5dG2rboc0aD44/38FQ=";
cargoSha256 = "sha256-1ZNbqydRsXmMGLhqPrgNAE8bhpZCMAJO/YQbOvtiS/s=";
buildInputs = lib.optionals stdenv.isDarwin [ libiconv Security ];

View file

@ -11,16 +11,16 @@
rustPlatform.buildRustPackage rec {
pname = "cargo-crev";
version = "0.19.2";
version = "0.19.4";
src = fetchFromGitHub {
owner = "crev-dev";
repo = "cargo-crev";
rev = "v${version}";
sha256 = "sha256-aqvdAljAJsYtmxz/WtMrrnmJJRXDpqDjUn1LusoM8ns=";
sha256 = "sha256-XwwzMo06TdyOtGE9Z48mkEr6DnB/89wtMrW+UWr0G/Q=";
};
cargoSha256 = "sha256-KwnZmehh0vdR1eSPBrY6yHJR6r7mhIEgfN4soEBDTjU=";
cargoSha256 = "sha256-gA2Fg4CCi0W+GqJoNPZWw/OjNYh2U2UsC6eMZ9W1QN8=";
nativeBuildInputs = [ perl pkg-config ];

View file

@ -1,22 +1,24 @@
{ lib
, rustPlatform, fetchFromGitHub
, libusb1, pkg-config, rustfmt }:
, libusb1, libftdi1, pkg-config, rustfmt }:
rustPlatform.buildRustPackage rec {
pname = "cargo-embed";
version = "0.8.0";
version = "0.10.1";
src = fetchFromGitHub {
owner = "probe-rs";
repo = pname;
rev = "v${version}";
sha256 = "0klkgl7c42vhqxj6svw26lcr7rccq89bl17jn3p751x6281zvr35";
sha256 = "1z8n883cb4jca3phi9x2kwl01xclyr00l8jxgiyd28l2jik78i5k";
};
cargoSha256 = "1nqrij4j8787x7zqgdcscf8i436s19gwk08nyixhmf9sprcfb0ck";
cargoSha256 = "1ir9qngxmja6cm42m40jqbga9mlfjllm23ca26wyigjv3025pi6i";
nativeBuildInputs = [ pkg-config rustfmt ];
buildInputs = [ libusb1 ];
buildInputs = [ libusb1 libftdi1 ];
cargoBuildFlags = [ "--features=ftdi" ];
meta = with lib; {
description = "A cargo extension for working with microcontrollers";

View file

@ -23,9 +23,7 @@ rustPlatform.buildRustPackage rec {
cargoSha256 = "sha256-P7xyg9I1MhmiKlyAI9cvABcYKNxB6TSvTgMsMk5KxAQ=";
nativeBuildInputs = [ pkg-config rustfmt ];
buildInputs = [ libusb1 ]
++ lib.optionals (!stdenv.isDarwin) [ openssl ]
++ lib.optionals stdenv.isDarwin [ Security ];
buildInputs = [ libusb1 openssl ] ++ lib.optionals stdenv.isDarwin [ Security ];
meta = with lib; {
description = "A cargo extension for working with microcontrollers";

Some files were not shown because too many files have changed in this diff.