gnu: python-autograd: Update to 1.7.0.

* gnu/packages/machine-learning.scm (python-autograd): Update to 1.7.0.
[source]: Use the latest version tag.
[native-inputs]: Remove python-nose, python-setuptools, and
python-wheel; add python-hatchling.

Change-Id: I42cd6b9ce621c1509f459fb947b09d05635fb79b
This commit is contained in:
Sharlatan Hellseher 2025-03-25 12:01:49 +00:00 committed by Andreas Enge
parent ab3a8e1096
commit 7b6f6e9604
No known key found for this signature in database
GPG key ID: F7D5C9BF765C61E3

View file

@@ -2681,27 +2681,24 @@ Covariance Matrix Adaptation Evolution Strategy (CMA-ES) for Python.")
     (license license:expat)))
 
 (define-public python-autograd
-  (let* ((commit "c6d81ce7eede6db801d4e9a92b27ec5d409d0eab")
-         (revision "0")
-         (version (git-version "1.5" revision commit)))
-    (package
-      (name "python-autograd")
-      (home-page "https://github.com/HIPS/autograd")
-      (source (origin
-                (method git-fetch)
-                (uri (git-reference
-                      (url home-page)
-                      (commit commit)))
-                (sha256
-                 (base32
-                  "04kljgydng42xlg044h6nbzxpban1ivd6jzb8ydkngfq88ppipfk"))
-                (file-name (git-file-name name version))))
-      (version version)
-      (build-system pyproject-build-system)
-      (native-inputs
-       (list python-nose python-pytest python-setuptools python-wheel))
-      (propagated-inputs
-       (list python-future python-numpy))
-      (synopsis "Efficiently computes derivatives of NumPy code")
-      (description "Autograd can automatically differentiate native Python and
-NumPy code. It can handle a large subset of Python's features, including loops,
+  (package
+    (name "python-autograd")
+    (version "1.7.0")
+    (source (origin
+              (method git-fetch)
+              (uri (git-reference
+                    (url "https://github.com/HIPS/autograd")
+                    (commit (string-append "v" version))))
+              (sha256
+               (base32
+                "1fpnmm3mzw355iq7w751j4mjfcr0yh324cxidba1l22652gg8r8m"))
+              (file-name (git-file-name name version))))
+    (build-system pyproject-build-system)
+    (native-inputs
+     (list python-hatchling python-pytest))
+    (propagated-inputs
+     (list python-future python-numpy))
+    (home-page "https://github.com/HIPS/autograd")
+    (synopsis "Efficiently computes derivatives of NumPy code")
+    (description "Autograd can automatically differentiate native Python and
+NumPy code. It can handle a large subset of Python's features, including loops,
@@ -2711,7 +2708,7 @@ of derivatives. It supports reverse-mode differentiation
 scalar-valued functions with respect to array-valued arguments, as well as
 forward-mode differentiation, and the two can be composed arbitrarily. The
 main intended application of Autograd is gradient-based optimization.")
-      (license license:expat))))
+    (license license:expat)))
 
 (define-public lightgbm
   (package