path: root/nixpkgs/pkgs/development/python-modules/torchaudio/bin.nix
{ lib
, stdenv
, buildPythonPackage
, fetchurl
, python
, pytorch-bin
, pythonOlder
, pythonAtLeast
}:

buildPythonPackage rec {
  pname = "torchaudio";
  version = "0.10.0";
  format = "wheel";

  src =
    let pyVerNoDot = lib.replaceStrings [ "." ] [ "" ] python.pythonVersion;
        unsupported = throw "Unsupported system";
        srcs = (import ./binary-hashes.nix version)."${stdenv.system}-${pyVerNoDot}" or unsupported;
    in fetchurl srcs;
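
  # The src attribute above picks a prebuilt wheel out of ./binary-hashes.nix,
  # keyed by "<system>-<Python version without dot>". A minimal sketch of the
  # shape that lookup assumes (illustrative URL and hash, not the real values;
  # the actual file may also dispatch on the version argument):
  #
  #   version: {
  #     "x86_64-linux-39" = {
  #       url = "https://download.pytorch.org/whl/...";  # placeholder
  #       hash = "sha256-AAAA...";                        # placeholder
  #     };
  #     # ... one attribute per supported system/Python pair
  #   }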

  disabled = ! (pythonAtLeast "3.7" && pythonOlder "3.10");

  propagatedBuildInputs = [
    pytorch-bin
  ];

  # Stripping the prebuilt wheel breaks its shared objects and leads to
  # `ImportError: libtorch_cuda_cpp.so: ELF load command address/offset not properly aligned`.
  dontStrip = true;

  pythonImportsCheck = [ "torchaudio" ];

  postFixup = ''
    # Note: even after patchelf'ing, libcudart still cannot be found. This should
    #       not be an issue, because PyTorch is loaded before torchaudio and brings
    #       in the necessary symbols.
    patchelf --set-rpath "${lib.makeLibraryPath [ stdenv.cc.cc.lib ]}:${pytorch-bin}/${python.sitePackages}/torch/lib:" \
      "$out/${python.sitePackages}/torchaudio/_torchaudio.so"
  '';
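
  # To double-check the rewritten rpath by hand, something along these lines should
  # work after a build (hypothetical invocation; adjust the Python version and path):
  #
  #   patchelf --print-rpath result/lib/python3.9/site-packages/torchaudio/_torchaudio.so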

  meta = with lib; {
    description = "PyTorch audio library";
    homepage = "https://pytorch.org/";
    changelog = "https://github.com/pytorch/audio/releases/tag/v${version}";
    # Includes CUDA and Intel MKL, but redistribution of the binary is not restricted.
    # https://docs.nvidia.com/cuda/eula/index.html
    # https://www.intel.com/content/www/us/en/developer/articles/license/onemkl-license-faq.html
    license = licenses.bsd3;
    platforms = platforms.linux;
    maintainers = with maintainers; [ junjihashimoto ];
  };
}