Merge pull request #135659 from fabaff/fix-tokenizers

python3Packages.tokenizers: 0.10.3 -> unstable-2021-08-13
Sandro 2021-08-27 06:21:53 +02:00 committed by GitHub
commit ba7c808301

@@ -49,19 +49,19 @@ let
   };
 in buildPythonPackage rec {
   pname = "tokenizers";
-  version = "0.10.3";
+  version = "unstable-2021-08-13";
 
   src = fetchFromGitHub {
     owner = "huggingface";
     repo = pname;
-    rev = "python-v${version}";
-    hash = "sha256-X7aUiJJjB2ZDlE8LbK7Pn/15SLTZbP8kb4l9ED7/xvU=";
+    rev = "e7dd6436dd4a4ffd9e8a4f110ca68e6a38677cb6";
+    sha256 = "1p7w9a43a9h6ys5nsa4g89l65dj11037p7a1lqkj4x1yc9kv2y1r";
   };
 
   cargoDeps = rustPlatform.fetchCargoTarball {
     inherit src sourceRoot;
     name = "${pname}-${version}";
-    hash = "sha256-gRqxlL6q87sGC0birDhCmGF+CVbfxwOxW6Tl6+5mGoo=";
+    sha256 = "1yb4jsx6mp9jgd1g3mli6vr6mri2afnwqlmxq1rpvn34z6b3iw9q";
   };
 
   sourceRoot = "source/bindings/python";
@@ -97,6 +97,10 @@ in buildPythonPackage rec {
       ln -s ${openaiMerges} openai-gpt-merges.txt )
   '';
 
+  postPatch = ''
+    echo 'import multiprocessing; multiprocessing.set_start_method("fork")' >> tests/__init__.py
+  '';
+
   preCheck = ''
     HOME=$TMPDIR
   '';
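
Background on the new postPatch hook, also not part of the commit itself: the echoed line makes the test package pin Python's multiprocessing start method to "fork" before any test runs. A hedged sketch of the same hook with an explanatory comment; the stated reason is an assumption about running the test suite in the build sandbox, not something the commit records:

  postPatch = ''
    # Assumption: force the "fork" start method so multiprocessing-based
    # tests reuse the already-imported interpreter state inside the Nix
    # build sandbox instead of spawning fresh worker processes.
    echo 'import multiprocessing; multiprocessing.set_start_method("fork")' >> tests/__init__.py
  '';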