Compare commits

20 Commits

| SHA1 |
|---|
| 169b36cbb4 |
| a398cf223f |
| 26d6f99ce4 |
| 5e775250ed |
| 1e1d2a3800 |
| f2f3ad5bc2 |
| 8abb0c9b4c |
| 10c16d0fb2 |
| dc38151cf9 |
| 3cd29b31e5 |
| 8f36d1fbf5 |
| 1bc36febc7 |
| 17d13f17d4 |
| a8233a2e83 |
| 8019407c69 |
| 016a641c38 |
| 5c2be8a078 |
| bfb6f5bf1b |
| 8b14fe2dbf |
| 175d46bdd9 |
@@ -6,6 +6,10 @@ additional packages to create a repository suitable for use on Compute
 Canada HPC clusters. It is available to use by default on Compute
 Canada clusters. It's layout is modelled after the upstream one.
 
+:note: This Edition uses flake overlays for integration into flake
+derivations readily, as well as hydraJobs specified for integration
+into caches
+
 
 # Using
 
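As a rough sketch of what the note above describes (not part of this compare; the `ccpkgs` input URL is a placeholder, only the `overlays.default` output comes from the flake.nix added below), a downstream flake could consume the overlay like this:

    {
      inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.05";
      inputs.ccpkgs.url = "github:example/ccpkgs";  # hypothetical location of this repository

      outputs = { self, nixpkgs, ccpkgs }: {
        packages.x86_64-linux.default =
          (import nixpkgs {
            system = "x86_64-linux";
            overlays = [ ccpkgs.overlays.default ];  # overlay exported by the new flake.nix
          }).fastqc;
      };
    }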

default.nix (151 changed lines)
@@ -1,149 +1,2 @@
-with builtins;
-
-# This duplicates the overlay code in nixpkgs/pkgs/top-level/impure.nix (may require periodic resyncing).
-#
-# It has to be done this way because passing overlays to nixpkgs (required to insert our packages) disables all the
-# default overlay mechanisms (required to pickup any user or system overlays).
-
-let
-
-  # overlaysDefault - List of overlay functions nixpkgs would normally use if not provided.
-  #
-  # This is the order overlays are searched for in accordance to what is done in nixpkgs. If an overlay is found
-  # at a given level (the leading number) the search is stopped and they are used. If multiple overlays are found
-  # at a given level then an error is generated.
-  #
-  # 1. <nixpkgs-overlays> (i.e., nixpkgs-overlays.nix or nixpkgs-overlays/default.nix in NIX_PATH)
-  # 2. ~/.config/nixpkgs/overlays.nix
-  # 2. ~/.config/nixpkgs/overlays
-  #
-  # This code is lifted directly from nixpkgs/pkgs/top-level/impure.nix to ensure consistency.
-  #
-  overlaysDefault =
-    let
-
-      # try expression default - Replace expression with default on exception.
-      try = x: def: let res = tryEval x; in if res.success then res.value else def;
-
-      # isDir path - Is path a directory (requires access).
-      isDir = path: pathExists (path + "/.");
-
-      # overlaysRetrieve path - Retrieve a list of the overlay functions from path.
-      # path is file - import the file itself (should give a list of overlay functions)
-      # path is directory - list of imports of all the *.nix files in the directory (each should give an overlay function)
-      #
-      overlaysRetrieve = path:
-        if isDir path then
-          let content = readDir path; in
-          map (n: import (path + ("/" + n)))
-            (builtins.filter (n: builtins.match ".*\\.nix" n != null || pathExists (path + ("/" + n + "/default.nix")))
-              (attrNames content))
-        else
-          import path;
-
-      # pathOverlays - NIX_PATH nixpkgs-overlays file or "" if not found
-      pathOverlays = try <nixpkgs-overlays> "";
-
-      # homeDir - ~/
-      # homeOverlaysFile - ~/.config/nixpkgs/overlays.nix
-      # homeOverlaysDir - ~/.config/nixpkgs/overlays
-      #
-      homeDir = builtins.getEnv "HOME";
-      homeOverlaysFile = homeDir + "/.config/nixpkgs/overlays.nix";
-      homeOverlaysDir = homeDir + "/.config/nixpkgs/overlays";
-
-    in
-
-    if pathOverlays != "" && pathExists pathOverlays then overlaysRetrieve pathOverlays
-    else if pathExists homeOverlaysFile && pathExists homeOverlaysDir then
-      throw ''
-        Nixpkgs overlays can be specified with ${homeOverlaysFile} or ${homeOverlaysDir}, but not both.
-        Please remove one of them and try again.
-      ''
-    else if pathExists homeOverlaysFile then
-      if isDir homeOverlaysFile then
-        throw (homeOverlaysFile + " should be a file")
-      else overlaysRetrieve homeOverlaysFile
-    else if pathExists homeOverlaysDir then
-      if !(isDir homeOverlaysDir) then
-        throw (homeOverlaysDir + " should be a directory")
-      else overlaysRetrieve homeOverlaysDir
-    else [];
-
-
-  # overlaysAlways - List of overlay functions provide by this package.
-  #
-  # The naming in this function is misleading but consistent with nixpkgs.
-  #
-  # self - final package set (stack fully closed) (-> pkgs in all-packages.nix)
-  # super - prior package set (stack closed up to us)
-  # res - next package set (stack closed up to and over us) (-> self in all-packages.nix)
-  #
-  overlaysAlways =
-    let
-      # knot - Feed final and up to and over us overlay results into overlay
-      #
-      # This matches what is done in nixpkgs (see pkgs/top-level/stage.nix).
-      #
-      knot = path: self: super:
-        let res = import path res super self; in res;
-    in
-    map knot [
-      ./temporary/all-packages.nix
-      ./pkgs/all-packages.nix
-    ];
-
-  # nixpkgs - The underlying nixpkg to use.
-  #
-  # Get a usable nixpkgs, that is, one with a version that matches ours, or die.
-  #
-  nixpkgs =
-    let
-
-      # first check list - Return first element of list that passes check list otherwise aborts.
-      #
-      first = check: list:
-        if list == []
-        then builtins.throw ''
-          Unable to locate a suitable nixpkgs directory.
-          Most likely you want to git clone one into the top of the repo as 'upstream'.
-        ''
-        else if check ( builtins.head list )
-        then builtins.head list
-        else first check ( builtins.tail list );
-
-      # okay path - Check if path exist and match our version number and otherwise print a warning.
-      #
-      # Version numbers are taken from ./version files.
-      #
-      okay =
-        let
-          version = builtins.readFile ./.version;
-        in
-        path:
-          ( builtins.pathExists (path + /.version)
-            && ( builtins.readFile (path + /.version) == version
-              || builtins.trace ''
-                Skipping ${toString path} as not version ${version}.
-              '' false ) );
-
-      # paths - Paths to search for nixpkgs.
-      #
-      paths = [
-        ./upstream         # 1. build channel or git cloned one directly associated with us takes priority
-        <nixpkgs/upstream> # 2. if the channel is a version of ourselves the real one will be in a subdirectory
-        <nixpkgs>          # 3. otherwise maybe the channel is a nixpkgs proper
-      ];
-
-    in
-
-    first okay paths;
-
-in
-
-{ overlays ? overlaysDefault
-, upstream ? nixpkgs
-, ... } @ args:
-
-import upstream ( builtins.removeAttrs args [ "upstream" ]
-                  // { overlays = overlaysAlways ++ overlays; } )
+self: super:
+import ./pkgs/all-packages.nix self super
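A minimal sketch (not part of the diff) of how the slimmed-down default.nix can still be applied as a plain, non-flake overlay; the file name and `fastqc` attribute are only illustrative:

    # consumer.nix — hypothetical example file
    let
      pkgs = import <nixpkgs> {
        overlays = [ (import ./default.nix) ];  # default.nix now evaluates to `self: super: ...`
      };
    in
      pkgs.fastqc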

flake.lock (new generated file, 61 lines)
@@ -0,0 +1,61 @@
+{
+  "nodes": {
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1705309234,
+        "narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1705957679,
+        "narHash": "sha256-Q8LJaVZGJ9wo33wBafvZSzapYsjOaNjP/pOnSiKVGHY=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "9a333eaa80901efe01df07eade2c16d183761fa3",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "release-23.05",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "flake-utils": "flake-utils",
+        "nixpkgs": "nixpkgs"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}

flake.nix (new file, 39 lines)
@@ -0,0 +1,39 @@
+{
+  description = "Compute Canada Flake Packages (Overlay)";
+
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.05";
+  inputs.flake-utils.url = "github:numtide/flake-utils";
+
+  outputs = { self, nixpkgs, flake-utils }: {
+    overlays = {
+      default = final: prev: import ./pkgs/all-packages.nix final prev;
+    };
+  } // (flake-utils.lib.eachDefaultSystem (system:
+    let
+      pkgs = import nixpkgs {
+        inherit system;
+        config.allowAliases = false;
+        config.permittedInsecurePackages = [
+          "python-2.7.18.6"
+          "python-2.7.18.6-env"
+        ];
+        overlays = [ self.overlays.default ];
+      };
+      inherit (pkgs) lib;
+      overlayAttributes = lib.pipe (import ./. pkgs pkgs) [
+        builtins.attrNames
+        (lib.partition (n: lib.isDerivation pkgs.${n}))
+      ];
+      attributesToAttrset = attributes: lib.pipe attributes [
+        (map (n: lib.nameValuePair n pkgs.${n}))
+        lib.listToAttrs
+      ];
+    in {
+      lib = attributesToAttrset overlayAttributes.wrong;
+      packages = pkgs.lib.optionalAttrs
+        (system == "x86_64-linux" ) (attributesToAttrset overlayAttributes.right);
+      hydraJobs = pkgs.lib.optionalAttrs
+        (system == "x86_64-linux" ) self.packages;
+    }
+  ));
+}
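For reference, a small sketch of the `lib.partition` idiom the new flake.nix relies on: overlay attribute names are split into derivations (`right`, exposed as `packages`) and everything else (`wrong`, exposed as `lib`). The `pkgs` and the name list here are only illustrative:

    let
      pkgs = import <nixpkgs> { };
      names = [ "hello" "lib" ];
      # partition returns { right = ...; wrong = ...; }
      split = pkgs.lib.partition (n: pkgs.lib.isDerivation pkgs.${n}) names;
    in
      split  # => { right = [ "hello" ]; wrong = [ "lib" ]; }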
@@ -1,8 +1,7 @@
-# self - next package set (after our additions)
-# super - prior package set (before our additions)
-# pkgs - final package set (after everyone's additions)
+# final - next package set (after our additions)
+# prev - prior package set (before our additions)
 #
-self: super: pkgs: with pkgs; {
+final: prev: with prev; {
 
   fastqc = callPackage ./fastqc.nix { };
 
@@ -16,20 +15,21 @@ self: super: pkgs: with pkgs; {
 
   #meraculous = callPackage ./meraculous.nix { let stdenv49 = overrideCC stdenv gcc49; in stdenv = stdenv49; boost = boost159.override { stdenv = stdenv49 }; };
 
-  mfixgui = python36Packages.callPackage ./mfix/mfixgui.nix { };
+  mfixgui = python38Packages.callPackage ./mfix/mfixgui.nix { };
 
   mfixsolver = callPackage ./mfix/mfixsolver.nix { };
 
-  inherit (callPackage ./opengl/default.nix { inherit super; }) libGL mesa_glxgallium;
+  # inherit (callPackage ./opengl/default.nix { inherit prev; }) libGL mesa_glxgallium;
 
   openfoam = callPackage ./openfoam.nix { };
 
   openslide = callPackage ./openslide { };
 
-  ovito = libsForQt5.callPackage ./ovito.nix { };
+  ovito = qt6Packages.callPackage ./ovito.nix { };
 
-  pythonOverrides = lib.composeExtensions super.pythonOverrides (import ./python-packages.nix pkgs);
+  pythonOverrides = lib.composeExtensions prev.pythonOverrides (import ./python-packages.nix pkgs);
 
-  rOverrides = lib.composeExtensions super.rOverrides (import ./r-modules.nix pkgs);
+  rOverrides = lib.composeExtensions prev.rOverrides (import ./r-modules.nix pkgs);
 
   smoove = callPackage ./smoove { };
 
@@ -41,12 +41,12 @@ self: super: pkgs: with pkgs; {
 
   samblaster = callPackage ./samblaster.nix { };
 
-  svtyper = pythonPackages.callPackage ./svtyper.nix { };
+  svtyper = python38Packages.callPackage ./svtyper.nix { };
 
   idba = callPackage ./idba.nix { };
 
-  nixStable = ( super.nixStable.override { confDir= "/nix/etc"; } ).overrideAttrs
-    ( attrs: { patches = attrs.patches or [] ++ [ ./nix/remote.patch ./nix/cvmfs.patch ]; } );
-  nix = ( super.nix.override { confDir = "/nix/etc"; } ).overrideAttrs
+  # nixStable = ( prev.nixStable.override { confDir= "/nix/etc"; } ).overrideAttrs
+  #   ( attrs: { patches = attrs.patches or [] ++ [ ./nix/remote.patch ./nix/cvmfs.patch ]; } );
+  nix = ( prev.nix.override { confDir = "/nix/etc"; } ).overrideAttrs
     ( attrs: { patches = attrs.patches or [] ++ [ ./nix/remote.patch ./nix/cvmfs.patch ]; } );
 }
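The rename tracks the two-argument final/prev overlay convention; a minimal sketch of an overlay in the same style (the `myTool` attribute is illustrative, not part of this compare):

    final: prev: {
      # `prev` is the package set before this overlay; `final` is the fully
      # overlaid result. Use `prev` when modifying an existing package.
      myTool = prev.hello.overrideAttrs (old: { pname = "my-tool"; });
    }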

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromGitHub
+{ stdenv, lib, fetchFromGitHub
 , jdk, ant, makeWrapper
 , jre, perl }:
 
@@ -25,7 +25,7 @@ stdenv.mkDerivation rec {
     makeWrapper $out/share/FastQC/fastqc $out/bin/fastqc --add-flags --java=${jre}/bin/java
   '';
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = https://www.bioinformatics.babraham.ac.uk/projects/fastqc/;
     description = "A quality control tool for high throughput sequence data";
     longDescription = ''

@@ -1,4 +1,4 @@
-{ stdenv, fetchurl, cmake, gfortran
+{ stdenv, lib, fetchurl, cmake, gfortran
 , netcdf, hdf5 }:
 
 stdenv.mkDerivation rec {
@@ -21,7 +21,7 @@ stdenv.mkDerivation rec {
   patches = ./exodus-include.patch;
   sourceRoot = "${name}/exodus";
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = "http://gsjaardema.github.io/seacas";
     description = "A model developed to store and retrieve data for finite element analyses.";
     license = licenses.bsd3;

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromGitHub, cmake
+{ stdenv, lib, fetchFromGitHub, cmake
 , openmpi }:
 
 stdenv.mkDerivation rec {
@@ -22,7 +22,7 @@ stdenv.mkDerivation rec {
 
   makeFlags = [ "VERBOSE=1" ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = "http://www.llnl.gov/casc/hypre";
     description = "Parallel solvers for sparse linear systems featuring multigrid methods.";
     license = licenses.gpl2;

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromBitbucket, cmake }:
+{ stdenv, lib, fetchFromBitbucket, cmake }:
 
 stdenv.mkDerivation rec {
   version = "5.1.0-p4";
@@ -17,7 +17,7 @@ stdenv.mkDerivation rec {
 
   makeFlags = [ "VERBOSE=1" ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = "https://bitbucket.org/petsc/metis";
     description = "A set of serial programs for partitioning graphs, partitioning finite element meshes, and producing fill reducing orderings for sparse matrices.";
   };

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromBitbucket, cmake
+{ stdenv, lib, fetchFromBitbucket, cmake
 , openmpi
 , metis }:
 
@@ -22,7 +22,7 @@ stdenv.mkDerivation rec {
   cmakeFlags = [ "-DGKLIB_PATH=../headers" ];
   makeFlags = [ "VERBOSE=1" ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = "http://glaros.dtc.umn.edu/gkhome/metis/parmetis";
     description = "An MPI-based parallel library that implements a variety of algorithms for partitioning unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices.";
   };

@@ -1,4 +1,4 @@
-{ stdenv, callPackage, writeText, fetchFromGitHub, python, gfortran
+{ stdenv, lib, callPackage, writeText, fetchFromGitHub, python, gfortran
 , openmpi, blas, liblapack, hdf5, netcdf, eigen
 , sowing, metis, hypre, parmetis, exodus }:
 
@@ -45,7 +45,7 @@ stdenv.mkDerivation rec {
 
   setupHook = writeText "setupHook.sh" "export PETSC_DIR=@out@";
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = "https://github.com/firedrakeproject/petsc";
     description = "A suite of data structures and routines for the scalable (parallel) solution of scientific applications modeled by partial differential equations.";
     license = licenses.bsd2;

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromBitbucket, ghostscript }:
+{ stdenv, lib, fetchFromBitbucket, ghostscript }:
 
 stdenv.mkDerivation rec {
   version = "1.1.25-p1";
@@ -16,7 +16,7 @@ stdenv.mkDerivation rec {
     ghostscript
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = "https://bitbucket.org/petsc/pkg-sowing";
     description = "Tools that are part of the petsc program development and maintenance environment.";
   };

@@ -1,4 +1,4 @@
-{ stdenv, fetchsvn, autoreconfHook, pkgconfig
+{ stdenv, lib, fetchsvn, autoreconfHook, pkg-config
 , libxml2, python2Packages, swig }:
 
 stdenv.mkDerivation rec {
@@ -37,7 +37,7 @@ stdenv.mkDerivation rec {
 
   buildInputs = [
     autoreconfHook
-    pkgconfig
+    pkg-config
     libxml2
     python2Packages.python
     swig
@@ -45,7 +45,7 @@ stdenv.mkDerivation rec {
 
   hardeningDisable = [ "format" ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     description = "The General Hidden Markov Model library (GHMM) is a freely available C library implementing efficient data structures and algorithms for basic and extended HMMs with discrete and continous emissions.";
     homepage = http://ghmm.sourceforge.net;
     platforms = platforms.all;

@@ -1,5 +1,5 @@
 # This file was generated by https://github.com/kamilchm/go2nix v1.3.0
-{ stdenv, buildGoPackage, fetchgit, fetchhg, fetchbzr, fetchsvn }:
+{ stdenv, lib, buildGoPackage, fetchgit, fetchhg, fetchbzr, fetchsvn }:
 
 buildGoPackage rec {
   name = "gsort-${version}";
@@ -15,7 +15,7 @@ buildGoPackage rec {
 
   goDeps = ./deps.nix;
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = https://github.com/brentp/gsort;
     description = "A tool to sort genomic files according to a genomefile.";
     platforms = platforms.all;

@@ -1,4 +1,4 @@
-{ stdenv, fetchurl
+{ stdenv, lib, fetchurl
 , bash }:
 
 stdenv.mkDerivation rec {
@@ -25,7 +25,7 @@ stdenv.mkDerivation rec {
   # stdenv
   # ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = https://github.com/loneknightpy/idba/;
     description = "Basic iterative de Bruijn graph assembler for second-generation sequencing reads";
     longDescription = "IDBA is the basic iterative de Bruijn graph assembler for second-generation sequencing reads. IDBA-UD, an extension of IDBA, is designed to utilize paired-end reads to assemble low-depth regions and use progressive depth on contigs to reduce errors in high-depth regions. It is a generic purpose assembler and especially good for single-cell and metagenomic sequencing data. IDBA-Hybrid is another update version of IDBA-UD, which can make use of a similar reference genome to improve assembly result. IDBA-Tran is an iterative de Bruijn graph assembler for RNA-Seq data.";

@@ -1,8 +1,27 @@
-{ stdenv, fetchFromGitHub, which, autoconf, automake
+{ stdenv, lib, fetchFromGitHub, which, autoconf, automake
 , zlib, curl
-, python, hexdump, sambamba, samblaster, samtools }:
-
-stdenv.mkDerivation rec {
+, python2, fetchPypi, hexdump, sambamba, samblaster, samtools }: let
+
+  python = python2.override {
+    packageOverrides = final: prev: {
+      numpy = prev.numpy.overridePythonAttrs(old: rec {
+        version = "1.16.6";
+        disabled = false;
+        src = fetchPypi {
+          inherit version;
+          pname = "numpy";
+          extension = "zip";
+          sha256 = "e5cf3fdf13401885e8eea8170624ec96225e2174eb0c611c6f26dd33b489e3ff";
+        };
+        nativeCheckInputs = [
+          prev.pytest
+          # "hypothesis" indirectly depends on numpy to build its documentation.
+          prev.hypothesis
+          prev.typing-extensions
+        ];
+      });
+    };
+  };
+
+in stdenv.mkDerivation rec {
   version = "0.3.0";
   name = "lumpy-${version}";
 
@@ -39,14 +58,19 @@ stdenv.mkDerivation rec {
     automake
     zlib
     curl
-    (python.withPackages (packages: with packages; [ numpy pysam ]))
+    (python.withPackages (packages: with packages;
+      [
+        numpy
+        pysam
+      ]
+    ))
     hexdump
     sambamba
     samblaster
     samtools
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     description = "A probabilistic framework for structural variant discovery.";
     homepage = https://github.com/arq5x/lumpy-sv;
     platforms = platforms.all;

@@ -1,4 +1,4 @@
-{ stdenv, overrideCC, gcc49, fetchurl, makeWrapper, gzip, cmake
+{ stdenv, lib, overrideCC, gcc49, fetchurl, makeWrapper, gzip, cmake
 , boost, perl, perlPackages, gnuplot }:
 
 # gcc 5.x c++11 requires istream to be explicitly cast to bool
@@ -30,7 +30,7 @@ stdenv.mkDerivation rec {
     gnuplot
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = http://jgi.doe.gov/data-and-tools/meraculous;
     description = "Meraculous is a whole genome assembler for Next Generation Sequencing data geared for large genomes.";
     platforms = platforms.all;

@@ -1,4 +1,4 @@
-{ lib, fetchurl, fetchFromGitHub, buildPythonApplication, sphinx, sphinx_rtd_theme
+{ lib, fetchurl, fetchFromGitHub, buildPythonApplication, sphinx, sphinx-rtd-theme
 , mfixsolver
 , numpy, psutil, pyqtgraph, qtpy, pyqt5, vtk, requests, simpleeval, simplejson }:
 
@@ -65,7 +65,7 @@ buildPythonApplication rec {
   '';
 
   nativeBuildInputs = [
-    sphinx sphinx_rtd_theme
+    sphinx sphinx-rtd-theme
   ];
 
   buildInputs = [

@@ -1,5 +1,5 @@
-{ stdenv, fetchurl, fetchFromGitHub
-, cmake, gfortran, python, pkgconfig
+{ stdenv, lib, fetchurl, fetchFromGitHub
+, cmake, gfortran, python38, pkg-config
 , openmpi, netcdf, boost }:
 
 stdenv.mkDerivation rec {
@@ -55,8 +55,8 @@ stdenv.mkDerivation rec {
   ];
 
   nativeBuildInputs = [
-    python
-    pkgconfig
+    python38
+    pkg-config
   ];
 
   buildInputs = [
@@ -67,7 +67,7 @@ stdenv.mkDerivation rec {
     # netcdf
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = https://mfix.netl.doe.gov;
     description = "An open-source multiphase flow solver";
     platforms = platforms.all;

@@ -1,4 +1,4 @@
-{ stdenv, bashInteractive, fetchFromGitHub, makeWrapper, flex, bison
+{ stdenv, lib, bashInteractive, fetchFromGitHub, makeWrapper, flex, bison
 , zlib, boost, openmpi, readline, gperftools, cgal, metis, scotch, mpfr }:
 
 
@@ -84,7 +84,7 @@ stdenv.mkDerivation rec {
     mpfr
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = http://www.openfoam.com;
     description = "Free open-source CFD software";
     platforms = platforms.linux;

@@ -3,8 +3,8 @@
 # Add a setup hook to the mesa package that automatically adds a
 # libvglfaker.so dependency to executables that depend on libGL.so.
 
-{ super, stdenv, buildEnv, substituteAll, bash
-, autoreconfHook, pkgconfig, python2
+{ prev, stdenv, buildEnv, substituteAll, bash
+, autoreconfHook, pkg-config, python2
 , xorg, llvmPackages, expat, mesa_glxgallium, mesa, libglvnd }:
 
 let
@@ -13,23 +13,23 @@ let
   # Ugliness required to break the loop created by the fact that
   # the libGL hook requires VirtualGL which requires libGL.
   #
-  # This would be clean if super was closed on itself (nixpkgs #15280)
+  # This would be clean if prev was closed on itself (nixpkgs #15280)
 
-  libGL = super.libGL;
-  libGLU = super.libGLU.override { inherit libGL; };
+  libGL = prev.libGL;
+  libGLU = prev.libGLU.override { inherit libGL; };
   libGLU_combined = buildEnv {
     name = "libGLU-combined";
     paths = [ libGL libGLU ];
     extraOutputsToInstall = [ "dev" ];
   };
 
-  virtualglLib = (super.virtualglLib.override { inherit libGL libGLU fltk; }).overrideAttrs (attrs: {
+  virtualglLib = (prev.virtualglLib.override { inherit libGL libGLU fltk; }).overrideAttrs (attrs: {
     postFixup = attrs.postFixup or "" + ''
       patchelf --set-rpath /usr/lib${stdenv.lib.optionalString stdenv.is64bit "64"}/nvidia:"$(patchelf --print-rpath $out/lib/libvglfaker.so)" $out/lib/libvglfaker.so
     '';
   } );
-  fltk = super.fltk.override { inherit libGL libGLU freeglut; };
-  freeglut = super.freeglut.override { inherit libGL libGLU; };
+  fltk = prev.fltk.override { inherit libGL libGLU; };
+  freeglut = prev.freeglut.override { inherit libGL libGLU; };
 in
 substituteAll {
   src = ./insert-virtualgl.sh;

@@ -1,5 +1,5 @@
-{ stdenv, autoreconfHook, pkgconfig, fetchFromGitHub
-, zlib, openjpeg, libtiff, cairo, libpng, gdk_pixbuf, libxml2, sqlite }:
+{ stdenv, lib, autoreconfHook, pkg-config, fetchFromGitHub
+, zlib, openjpeg, libtiff, cairo, libpng, gdk-pixbuf, libxml2, sqlite }:
 
 stdenv.mkDerivation rec {
   version = "3.4.1";
@@ -14,13 +14,13 @@ stdenv.mkDerivation rec {
 
   buildInputs = [
     autoreconfHook
-    pkgconfig
+    pkg-config
     zlib
     openjpeg
     libtiff
     cairo
     libpng
-    gdk_pixbuf
+    gdk-pixbuf
     libxml2
     sqlite
   ];
@@ -30,7 +30,7 @@ stdenv.mkDerivation rec {
     sed -i -e 's|AM_CONDITIONAL(\[WINDOWS_RESOURCES\], \[test x$RC != x\])|AM_CONDITIONAL([WINDOWS_RESOURCES], [false])|' configure.ac
   '';
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = http://openslide.org;
     description = "A C library that provides a simple interface to read whole-slide images.";
     platforms = platforms.all;

@@ -1,30 +1,55 @@
-{ mkDerivation, lib, fetchFromGitLab, cmake
-, boost, netcdf, hdf5, fftwSinglePrec, muparser, openssl, ffmpeg, python
-, qtbase, qtsvg, qttools, qscintilla }:
+{ stdenv
+, lib
+, fetchFromGitLab
+, cmake
+, ninja
+, wrapQtAppsHook
+, qtbase
+, qtnetworkauth
+, qtscxml
+, qtsvg
+, ffmpeg
+, netcdf
+, libssh
+, boost
+, hdf5
+, python3 ? python3
+}:
 
-mkDerivation rec {
+stdenv.mkDerivation rec {
   pname = "ovito";
-  version = "3.4.0";
+  version = "3.8.4";
 
   src = fetchFromGitLab {
     owner = "stuko";
     repo = "ovito";
     rev = "v${version}";
-    sha256 = "1y3wr6yzpsl0qm7cicp2mppfszxd0fgx8hm99in9wff9qd0r16b5";
+    sha256 = "Vw9mB+huQHLg8OqyGo9KEg55ag3NQG+nTJj3tB++eQk=";
   };
 
-  buildInputs = [
+  nativeBuildInputs = [
     cmake
-    boost
-    netcdf hdf5
-    fftwSinglePrec
-    muparser
-    openssl
-    ffmpeg
-    python
-    qtbase qtsvg qttools qscintilla
+    ninja
+    wrapQtAppsHook
+  ];
+
+  buildInputs = [
+    qtbase
+    qtnetworkauth
+    qtscxml
+    qtsvg
+    ffmpeg
+    netcdf
+    libssh
+    boost
+    hdf5
+    python3
+  ];
+
+  cmakeFlags = [
+    "-DOVITO_BUILD_PYPI=${if python3 != null then "ON" else "OFF"}"
   ];
 
   meta = with lib; {
     description = "Scientific visualization and analysis software for atomistic and particle simulation data";
     homepage = https://ovito.org;

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromGitHub, ldc
+{ stdenv, lib, fetchFromGitHub, ldc
 , python3, glibc, which, zlib }:
 
 stdenv.mkDerivation rec {
@@ -27,7 +27,7 @@ stdenv.mkDerivation rec {
     zlib
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = https://github.com/lomereiter/sambamba/wiki;
     description = "Sambamba is a set of command-line tools for working with SAM/BAM file formats which are used for storing next generation sequencing (NGS) datasets.";
     platforms = platforms.all;

@@ -1,4 +1,4 @@
-{ stdenv, fetchFromGitHub }:
+{ stdenv, lib, fetchFromGitHub }:
 
 stdenv.mkDerivation rec {
   version = "0.1.24";
@@ -19,7 +19,7 @@ stdenv.mkDerivation rec {
 
   buildInputs = [ ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     description = "A fast and flexible program for marking duplicates in read-id grouped paired-end SAM files.";
     homepage = https://github.com/GregoryFaust/samblaster;
     platforms = platforms.all;

@@ -1,5 +1,5 @@
 # This file was generated by https://github.com/kamilchm/go2nix v1.3.0
-{ stdenv, buildGoPackage, fetchgit, fetchhg, fetchbzr, fetchsvn, makeWrapper
+{ stdenv, lib, buildGoPackage, fetchgit, fetchhg, fetchbzr, fetchsvn, makeWrapper
 , gzip, gsort, htslib, lumpy, samtools, svtyper, mosdepth }:
 
 buildGoPackage rec {
@@ -31,7 +31,7 @@ buildGoPackage rec {
     makeWrapper
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = https://github.com/brentp/smoove;
     description = "Wrap existing software and adds some internal read-filtering to simplify calling and genotyping structural variants.";
     platforms = platforms.all;

@@ -1,4 +1,4 @@
-{ stdenv, fetchurl, unzip
+{ stdenv, lib, fetchurl, unzip
 , lzma, bzip2, zlib, libjpeg, libtiff, openssl
 , mesa, xorg
 , fox, xercesc, proj, gdal, hdf5, curl }:
@@ -24,7 +24,7 @@ stdenv.mkDerivation rec {
     curl
   ];
 
-  meta = with stdenv.lib; {
+  meta = with lib; {
     homepage = http://software.dlr.de/p/sumo/home;
     description = "SUMO is an open source, highly portable, microscopic and continuous road traffic simulation package designed to handle large road networks";
     platforms = platforms.all;

@@ -1,6 +1,6 @@
 { lib, fetchPypi, buildPythonApplication
 , scipy, cytoolz, pysam
-, pytest, pytestrunner }:
+, pytest, pytest-runner }:
 
 buildPythonApplication rec {
   version = "0.7.1";
@@ -16,7 +16,7 @@ buildPythonApplication rec {
     --replace o"pytest==4.6.4" "pytest"
   '';
 
-  nativeBuildInputs = [ pytestrunner ];
+  nativeBuildInputs = [ pytest-runner ];
   propagatedBuildInputs = [ scipy cytoolz pysam ];
   checkInputs = [ pytest ];
 

release.nix (deleted file, 167 lines)
@@ -1,167 +0,0 @@
-{ ccpkgs # Checked out ccpkgs git repo
-  ? { outPath = ./.;
-      revCount = 1234 + 149;
-      shortRev = "gabcdef";
-    }
-, nixpkgs # Checked out nixpkgs git repe
-  ? { outPath = ./upstream;
-      revCount = 5678 + 242760;
-      shortRev = "gfedcba";
-    }
-, nixpkgsArgs ? { config = { allowUnfree = true; inHydra = true; }; }
-, stableBranch ? false
-, supportedSystems ? [ "x86_64-linx" ] }:
-
-
-# Free up the nixpkgs and ccpkgs name for binding to final product.
-#
-let
-  # *Git - git checkout
-  #
-  nixpkgsGit = nixpkgs;
-  ccpkgsGit = ccpkgs;
-
-  # Tarball hydra release product for the ccpkgs channel.
-  #
-  # 1. Generate an unpacked channel for the associated nixpkgs in the store.
-  # 2. Symlink this into the top-level directory as upstream (nixpkgs is used).
-  # 3. Stick in the standard version files.
-  # 4. Let releaseTools.sourceTarball handle the details of exposing it as a hydra product.
-  #
-in let
-
-  # pkgs - Packages sucked in from the given ccpkgs git version.
-  #
-  pkgs = import ccpkgsGit ( nixpkgsArgs // { upstream = nixpkgsGit; } );
-
-  # version - Version number (e.g., "17.09")
-  # version*Suffix - Version suffix (e.g., "5678.gfedcba")
-  #
-  # Code lifted from nixos/release.nix.
-  #
-  version =
-    let
-      versionNixpkgs = pkgs.lib.fileContents (nixpkgsGit + /.version);
-      versionCcpkgs = pkgs.lib.fileContents ./.version;
-    in
-    assert (versionNixpkgs == versionCcpkgs); versionCcpkgs;
-
-  versionSeparator = if stableBranch then "." else "pre";
-
-  mkVersion = base: git: rec {
-    count = toString (git.revCount - base);
-    commit = git.shortRev;
-    suffix = "${versionSeparator}${count}.${commit}";
-  };
-
-  nixpkgsVersion = mkVersion 242760 nixpkgsGit;
-  ccpkgsVersion = mkVersion 149 ccpkgsGit;
-
-  versionSuffix = "${versionSeparator}${ccpkgsVersion.count}.${nixpkgsVersion.count}.${ccpkgsVersion.commit}.${nixpkgsVersion.commit}";
-
-  # nixpkgs - The store path containing the unpacked nixpkgs channel.
-  #
-  # 1. Generate a channel tarball from the git repo via the nixos/release.nix expression for hydra.
-  # 2. Unpack this into the store using the nix/unpack-channel.nix expression used by nix-channel.
-  #
-  nixpkgs =
-    let
-      # channel - Store path containing the channel release tarballs.
-      #
-      # The nixos.channel attribute from nixos/release.nix builds the nixpkgs channel tarballs.
-      #
-      channel =
-        ( import (nixpkgsGit + /nixos/release.nix) {
-          inherit stableBranch supportedSystems;
-          nixpkgs = nixpkgsGit;
-        } ).channel;
-
-      # nixpkgs - The store path containing the unpacked nixpkgs channel.
-      #
-      # The nix provided nix/unpack-channel.nix function extracts the nixpkgs from the channel,
-      #
-      nixpkgs =
-        ( pkgs.callPackage ./unpack-channel.nix { } rec {
-          name = "nixos";
-          channelName = "${name}-${version}";
-          src = channel + /tarballs + "/${name}-${version}${nixpkgsVersion.suffix}.tar.xz";
-        } );
-    in
-    nixpkgs;
-
-  # jobs - The jobs hydra is to run.
-  #
-  jobs =
-    let
-      # Extract the valid derivations from an overlay
-      #
-      # nameSet - the attribute set the uncomposed overlay (just the names are required)
-      # valueSet - the final attribute from the composed overlay (the values are required)
-      #
-      extractDerivations = valueSet: nameSet:
-        builtins.listToAttrs
-          ( builtins.map
-            ( name: { inherit name; value = valueSet.${name}; } )
-            ( pkgs.lib.filter
-              ( name: ( builtins.tryEval (pkgs.lib.isDerivation valueSet.${name}) ).value )
-              ( pkgs.lib.attrNames nameSet ) ) );
-
-      # Provided overlays
-      pkgsNamesTopLevel = let self = import ./pkgs/all-packages.nix self { } self; in self;
-      pkgsNamesPython = let self = import ./pkgs/python-packages.nix { } self { } ; in self;
-      pkgsNamesR = let self = import ./pkgs/r-modules.nix { } self { } ; in self;
-
-      temporaryNamesTopLevel = let self = import ./temporary/all-packages.nix self { } self; in self;
-      temporaryNamesPython = let self = import ./temporary/python-packages.nix { } self { } ; in self;
-      temporaryNamesR = let self = import ./temporary/r-modules.nix { } self { } ; in self;
-
-    in {
-      # Tweak the nixos make-channel code to include the upstream channel.
-      #
-      # 1. Replace the git repo nixpkgs with a copy of the unpacked nixpkgs channel.
-      #
-      channel =
-        ( import (nixpkgsGit + /nixos/lib/make-channel.nix) {
-          inherit pkgs version versionSuffix;
-          nixpkgs = ccpkgsGit;
-        } ).overrideAttrs ( original: {
-          name = "ccpkgs-channel";
-          distPhase =
-            ''
-              rm -fr upstream
-              cp -rd ${nixpkgs}/nixos-${version} upstream
-            '' + original.distPhase;
-        } );
-
-      # tested - Aggregate package set required to built for automatic channel release
-      #
-      tested = pkgs.lib.hydraJob (pkgs.releaseTools.aggregate {
-        name = "ccpkgs-${version}";
-
-        constituents = builtins.map ( pkgs.lib.collect pkgs.lib.isDerivation ) [ jobs.pkgs jobs.temporary ];
-
-        meta = {
-          description = "Release-critical builds for the ccpkgs channel";
-          maintainers = with pkgs.lib.maintainers; [ ];
-        };
-      });
-
-      # pkgs - Attribute set of overlayed pkgs.
-      #
-      pkgs = extractDerivations pkgs pkgsNamesTopLevel // {
-        python2Packages = extractDerivations pkgs.python2Packages pkgsNamesPython;
-        python3Packages = extractDerivations pkgs.python3Packages pkgsNamesPython;
-        rPackages = extractDerivations pkgs.rPackages pkgsNamesR;
-      };
-
-
-      # temporary - Attribute set of overlayed pkgs.
-      #
-      temporary = extractDerivations pkgs temporaryNamesTopLevel // {
-        python2Packages = extractDerivations pkgs.python2Packages temporaryNamesPython;
-        python3Packages = extractDerivations pkgs.python3Packages temporaryNamesPython;
-        rPackages = extractDerivations pkgs.rPackages temporaryNamesR;
-      };
-    };
-
-in jobs

@@ -1,45 +0,0 @@
-# Hydra runs the newer nix which doesn't have corepkgs/unpack-channel.nix. The nix-daemon runs the older nix which
-# doesn't have builtins:unpack-channel. This means
-#
-# - for now we need to provide the older corepkgs/unpack-channel.nix
-# - in the future we should switch to src/nix-channel/unpack-channel.nix
-
-{ bash, coreutils, gnutar, xz, gzip, bzip2 }:
-
-let
-
-  tarFlags = "--warning=no-timestamp";
-
-  builder = builtins.toFile "unpack-channel.sh"
-    ''
-      mkdir $out
-      cd $out
-      xzpat="\.xz\$"
-      gzpat="\.gz\$"
-      if [[ "$src" =~ $xzpat ]]; then
-        xz -d < $src | tar xf - ${tarFlags}
-      elif [[ "$src" =~ $gzpat ]]; then
-        gzip -d < $src | tar xf - ${tarFlags}
-      else
-        bzip2 -d < $src | tar xf - ${tarFlags}
-      fi
-      if [ * != $channelName ]; then
-        mv * $out/$channelName
-      fi
-    '';
-
-in
-
-{ name, channelName, src }:
-
-derivation {
-  system = builtins.currentSystem;
-  builder = "${bash}/bin/sh";
-  args = [ "-e" builder ];
-  inherit name channelName src;
-
-  PATH = "${coreutils}/bin:${gnutar}/bin:${xz}/bin:${gzip}/bin:${bzip2}/bin";
-
-  # No point in doing this remotely.
-  preferLocalBuild = true;
-}