From 16d18cb2a77612f31df4cd06c3997b833390809c Mon Sep 17 00:00:00 2001
From: Shunsuke KITADA
Date: Wed, 8 Jan 2025 16:11:40 +0900
Subject: [PATCH 1/9] update uv files

---
 pyproject.toml |  11 ++
 uv.lock        | 306 +++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 317 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 41c3081..555ac78 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,6 +2,10 @@
 name = "diffusers-ncsn"
 version = "0.1.0"
 description = "Diffusers implementation of Noise Conditional Score Networks (NCSN) [Yang+ NeurIPS'19]"
+authors = [
+    { name = "Shunsuke Kitada", email = "shunsuke.kitada.0831@gmail.com" },
+    { name = "Image Generation with Python", email = "python.image.generation@gmail.com" },
+]
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = [
@@ -14,6 +18,13 @@ dependencies = [
 
 [project.optional-dependencies]
 training = ["matplotlib>=3.10.0"]
+doc = [
+    "sphinx-pyproject>=0.3.0",
+    "sphinx>=8.1.3",
+    "furo>=2024.8.6",
+    "myst-parser>=4.0.0",
+    "sphinx-autodoc-typehints>=3.0.0",
+]
 
 [dependency-groups]
 dev = ["mypy>=1.0.0", "pytest>=6.0.0", "ruff>=0.1.5"]

diff --git a/uv.lock b/uv.lock
index d34bca3..1166a04 100644
--- a/uv.lock
+++ b/uv.lock
@@ -23,6 +23,36 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c2/60/a585c806d6c0ec5f8149d44eb202714792802f484e6e2b1bf96b23bd2b00/accelerate-1.2.1-py3-none-any.whl", hash = "sha256:be1cbb958cf837e7cdfbde46b812964b1b8ae94c9c7d94d921540beafcee8ddf", size = 336355 },
 ]
 
+[[package]]
+name = "alabaster"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 },
+]
+
+[[package]]
+name = "babel"
+version = "2.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 },
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "soupsieve" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 },
+]
+
 [[package]]
 name = "certifi"
 version = "2024.12.14"
@@ -213,6 +243,13 @@ dependencies = [
 ]
 
 [package.optional-dependencies]
+doc = [
+ { name = "furo" }, + { name = "myst-parser" }, + { name = "sphinx" }, + { name = "sphinx-autodoc-typehints" }, + { name = "sphinx-pyproject" }, +] training = [ { name = "matplotlib" }, ] @@ -228,7 +265,12 @@ dev = [ requires-dist = [ { name = "diffusers", extras = ["torch"], specifier = ">=0.31.0" }, { name = "einops", specifier = ">=0.7.0" }, + { name = "furo", marker = "extra == 'doc'", specifier = ">=2024.8.6" }, { name = "matplotlib", marker = "extra == 'training'", specifier = ">=3.10.0" }, + { name = "myst-parser", marker = "extra == 'doc'", specifier = ">=4.0.0" }, + { name = "sphinx", marker = "extra == 'doc'", specifier = ">=8.1.3" }, + { name = "sphinx-autodoc-typehints", marker = "extra == 'doc'", specifier = ">=3.0.0" }, + { name = "sphinx-pyproject", marker = "extra == 'doc'", specifier = ">=0.3.0" }, { name = "torch", specifier = ">=1.0.0" }, { name = "torchvision", specifier = ">=0.2.1" }, { name = "transformers", specifier = ">=4.30.0" }, @@ -241,6 +283,41 @@ dev = [ { name = "ruff", specifier = ">=0.1.5" }, ] +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 }, +] + +[[package]] +name = "dom-toml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "domdf-python-tools" }, + { name = "tomli" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/34/f7690cf288eaa86b55c8f1b890d0834e6df44a026a88eca12274fcd624ab/dom_toml-2.0.0.tar.gz", hash = "sha256:3c07e8436538994974127b1ae037661d1a779ac915c44fd06b3ab5fe140ff589", size = 11133 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/99/b6fc87dff3138491d81676bdcbf1531080925ba41486ec1dafd86e33fdbc/dom_toml-2.0.0-py3-none-any.whl", hash = "sha256:0b6d02a72bcbc6be8175c61afc30623bbb6b74c4650f2a806fbc3fb7fe86935d", size = 13376 }, +] + +[[package]] +name = "domdf-python-tools" +version = "3.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "natsort" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6b/78/974e10c583ba9d2302e748c9585313a7f2c7ba00e4f600324f432e38fe68/domdf_python_tools-3.9.0.tar.gz", hash = "sha256:1f8a96971178333a55e083e35610d7688cd7620ad2b99790164e1fc1a3614c18", size = 103792 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/e9/7447a88b217650a74927d3444a89507986479a69b83741900eddd34167fe/domdf_python_tools-3.9.0-py3-none-any.whl", hash = "sha256:4e1ef365cbc24627d6d1e90cf7d46d8ab8df967e1237f4a26885f6986c78872e", size = 127106 }, +] + [[package]] name = "einops" version = "0.8.0" @@ -318,6 +395,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/de/86/5486b0188d08aa643e127774a99bac51ffa6cf343e3deb0583956dca5b22/fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2", size = 183862 }, ] +[[package]] +name = "furo" +version = "2024.8.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/e2/d351d69a9a9e4badb4a5be062c2d0e87bd9e6c23b5e57337fef14bef34c8/furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01", size = 1661506 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/48/e791a7ed487dbb9729ef32bb5d1af16693d8925f4366befef54119b2e576/furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c", size = 341333 }, +] + [[package]] name = "huggingface-hub" version = "0.27.0" @@ -345,6 +437,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 }, +] + [[package]] name = "importlib-metadata" version = "8.5.0" @@ -465,6 +566,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/1d/50ad811d1c5dae091e4cf046beba925bcae0a610e79ae4c538f996f63ed5/kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b", size = 71762 }, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + [[package]] name = "markupsafe" version = "3.0.2" @@ -575,6 +688,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c9/b4/680aa700d99b48e8c4393fa08e9ab8c49c0555ee6f4c9c0a5e8ea8dfde5d/matplotlib-3.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80dc3a4add4665cf2faa90138384a7ffe2a4e37c58d83e115b54287c4f06ef", size = 8587361 }, ] +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + [[package]] name = "mpmath" version = "1.3.0" @@ -627,6 +761,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, ] +[[package]] +name = "myst-parser" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name = "markdown-it-py" }, + { name = "mdit-py-plugins" }, + { name = "pyyaml" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/55/6d1741a1780e5e65038b74bce6689da15f620261c490c3511eb4c12bac4b/myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531", size = 93858 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/b4/b036f8fdb667587bb37df29dc6644681dd78b7a2a6321a34684b79412b28/myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d", size = 84563 }, +] + +[[package]] +name = "natsort" +version = "8.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/a9/a0c57aee75f77794adaf35322f8b6404cbd0f89ad45c87197a937764b7d0/natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581", size = 76575 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/82/7a9d0550484a62c6da82858ee9419f3dd1ccc9aa1c26a1e43da3ecd20b0d/natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c", size = 38268 }, +] + [[package]] name = "networkx" version = "3.4.2" @@ -918,6 +1078,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7b/d7/7831438e6c3ebbfa6e01a927127a6cb42ad3ab844247f3c5b96bea25d73d/psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649", size = 254444 }, ] +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 
}, +] + [[package]] name = "pyparsing" version = "3.2.0" @@ -1188,6 +1357,143 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" }, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/37/725d06f80cbe85b8538021df4515b7408af231cbe8b201a3bb2cf773a83d/sphinx_autodoc_typehints-3.0.0.tar.gz", hash = "sha256:d5cdab471efb10fcff4ffe81a2ef713398bc891af9d942a4b763f5ed1d9bf550", size = 35943 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/36/110d38a8b481b915d77461ca68764ebacf7aeba84be7cbe2dc965a65ae80/sphinx_autodoc_typehints-3.0.0-py3-none-any.whl", hash = "sha256:b82bf83e23ae3d5dc25881004a6d6614be6291ff8ff165b2d1e18799f0f6bd74", size = 20041 }, +] + +[[package]] +name = 
"sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496 }, +] + +[[package]] +name = "sphinx-pyproject" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dom-toml" }, + { name = "domdf-python-tools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/97/aa8cec3da3e78f2c396b63332e2fe92fe43f7ff2ad19b3998735f28b0a7f/sphinx_pyproject-0.3.0.tar.gz", hash = "sha256:efc4ee9d96f579c4e4ed1ac273868c64565e88c8e37fe6ec2dc59fbcd57684ab", size = 7695 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/d5/89cb47c6399fd57ca451af15361499813c5d53e588cb6e00d89411ce724f/sphinx_pyproject-0.3.0-py3-none-any.whl", hash = "sha256:3aca968919f5ecd390f96874c3f64a43c9c7fcfdc2fd4191a781ad9228501b52", size = 23076 }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 }, +] + [[package]] name = "sympy" version = "1.13.1" From 7ff577d73a6f979c9621606ca29ab770cdd5e4e6 Mon Sep 17 00:00:00 2001 From: Shunsuke KITADA Date: Wed, 8 Jan 2025 16:12:06 +0900 Subject: [PATCH 2/9] update to add docstrings --- src/ncsn/__init__.py | 10 ++++ src/ncsn/pipeline_ncsn.py | 18 +++++-- src/ncsn/scheduler/scheduling_ncsn.py | 75 ++++++++++++++++++++++++++- src/ncsn/unet/unet_2d_ncsn.py | 55 +++++++++++++++++++- 4 files changed, 151 insertions(+), 7 deletions(-) diff --git a/src/ncsn/__init__.py b/src/ncsn/__init__.py index e69de29..ba2778a 100644 --- a/src/ncsn/__init__.py +++ b/src/ncsn/__init__.py @@ -0,0 +1,10 @@ +from importlib.metadata import version + +from ncsn.pipeline_ncsn import NCSNPipeline + +__version__ = version("diffusers-ncsn") + + +__all__ = [ + "NCSNPipeline", +] diff --git a/src/ncsn/pipeline_ncsn.py b/src/ncsn/pipeline_ncsn.py index 18244ce..09036ee 100644 --- a/src/ncsn/pipeline_ncsn.py +++ b/src/ncsn/pipeline_ncsn.py @@ -14,10 +14,11 @@ def normalize_images(image: torch.Tensor) -> torch.Tensor: - """Normalize the image to be between 0 and 1 using min-max normalization manner. + r"""Normalize the image to be between 0 and 1 using min-max normalization manner. Args: - image (torch.Tensor): The batch of images to normalize. + image (torch.Tensor): + The batch of images to normalize. Returns: torch.Tensor: The normalized image. @@ -59,6 +60,15 @@ def __init__( self.register_modules(unet=unet, scheduler=scheduler) def decode_samples(self, samples: torch.Tensor) -> torch.Tensor: + r"""Decodes the generated samples to the correct format suitable for images. + + Args: + samples (torch.Tensor): + The generated samples to decode. 
+
+        Returns:
+            torch.Tensor: The decoded samples.
+        """
         # Normalize the generated image
         samples = normalize_images(samples)
         # Rearrange the generated image to the correct format
@@ -109,8 +119,8 @@ def __call__(
             `._callback_tensor_inputs` attribute of your pipeline class.
 
         Returns:
-            [`~pipelines.ImagePipelineOutput`] or `tuple`:
-                If `return_dict` is `True`, [`~pipelines.ImagePipelineOutput`] is returned, otherwise a `tuple` is
+            `diffusers.pipelines.ImagePipelineOutput` or `tuple`:
+                If `return_dict` is `True`, `diffusers.pipelines.ImagePipelineOutput` is returned, otherwise a `tuple` is
                 returned where the first element is a list with the generated images.
         """
         callback_on_step_end_tensor_inputs = (

diff --git a/src/ncsn/scheduler/scheduling_ncsn.py b/src/ncsn/scheduler/scheduling_ncsn.py
index f31145e..9581e83 100644
--- a/src/ncsn/scheduler/scheduling_ncsn.py
+++ b/src/ncsn/scheduler/scheduling_ncsn.py
@@ -16,11 +16,21 @@
 
 @dataclass
 class AnnealedLangevinDynamicsOutput(SchedulerOutput):
-    """Annealed Langevin Dynamics output class."""
+    r"""Annealed Langevin Dynamics output class."""
 
 
 class AnnealedLangevinDynamicsScheduler(SchedulerMixin, ConfigMixin):  # type: ignore
-    """Annealed Langevin Dynamics scheduler for Noise Conditional Score Network (NCSN)."""
+    r"""Annealed Langevin Dynamics scheduler for Noise Conditional Score Networks (NCSN).
+
+    This scheduler inherits from [`SchedulerMixin`]. Check the superclass documentation for its generic methods implemented for all schedulers (such as downloading or saving).
+
+    Args:
+        num_train_timesteps (`int`): Number of training timesteps.
+        num_annealed_steps (`int`): Number of annealed steps.
+        sigma_min (`float`): Minimum standard deviation for the isotropic Gaussian noise.
+        sigma_max (`float`): Maximum standard deviation for the isotropic Gaussian noise.
+        sampling_eps (`float`): Sampling epsilon for the Langevin dynamics.
+    """
 
     order = 1
 
@@ -72,6 +82,22 @@ def set_timesteps(
         sampling_eps: Optional[float] = None,
         device: Optional[Union[str, torch.device]] = None,
     ) -> None:
+        r"""Sets the timesteps for the scheduler (to be run before inference).
+
+        Args:
+            num_inference_steps (`int`):
+                The number of diffusion steps used when generating samples with a pre-trained model.
+            device (`str` or `torch.device`, *optional*):
+                The device to which the timesteps should be moved. Defined to maintain compatibility with other
+                pipelines, but this argument is not actually used by this scheduler.
+            sampling_eps (`float`, *optional*):
+                The sampling epsilon for the Langevin dynamics. If `None`, the default value is used.
+        """
         sampling_eps = sampling_eps or self._sampling_eps
 
         self._timesteps = torch.arange(start=0, end=num_inference_steps)
@@ -82,6 +108,19 @@ def set_sigmas(
         sigma_max: Optional[float] = None,
         sampling_eps: Optional[float] = None,
     ) -> None:
+        r"""Sets the sigmas and step sizes for the scheduler (to be run before inference).
+
+        Args:
+            num_inference_steps (`int`):
+                The number of diffusion steps used when generating samples with a pre-trained model.
+            sigma_min (`float`, *optional*):
+                The minimum standard deviation for the isotropic Gaussian noise. If `None`, the default value is used.
+            sigma_max (`float`, *optional*):
+                The maximum standard deviation for the isotropic Gaussian noise. If `None`, the default value is used.
+            sampling_eps (`float`, *optional*):
+                The sampling epsilon for the Langevin dynamics. If `None`, the default value is used.
+        """
         if self._timesteps is None:
             self.set_timesteps(
                 num_inference_steps=num_inference_steps,
@@ -109,6 +148,24 @@ def step(
         return_dict: bool = True,
         **kwargs,
     ) -> Union[AnnealedLangevinDynamicsOutput, Tuple]:
+        r"""Perform one step following Langevin dynamics. Annealing must be done separately.
+
+        Args:
+            model_output (`torch.Tensor`):
+                The score output from the learned neural network-based score function.
+            timestep (`int`):
+                The current timestep.
+            sample (`torch.Tensor`):
+                The current sample.
+            return_dict (`bool`, *optional*):
+                Whether or not to return :py:class:`~ncsn.scheduler.AnnealedLangevinDynamicsOutput` or `tuple`.
+
+        Returns:
+            :py:class:`~ncsn.scheduler.AnnealedLangevinDynamicsOutput` or `tuple`:
+                If `return_dict` is `True`, :py:class:`~ncsn.scheduler.AnnealedLangevinDynamicsOutput` is returned,
+                otherwise a tuple is returned where the first element is the updated sample.
+        """
+
         z = torch.randn_like(sample)
         step_size = self.step_size[timestep]
         sample = sample + 0.5 * step_size * model_output + torch.sqrt(step_size) * z
@@ -124,6 +181,20 @@ def add_noise(
         noise: torch.Tensor,
         timesteps: torch.Tensor,
     ) -> torch.Tensor:
+        r"""Add noise to the original samples.
+
+        Args:
+            original_samples (`torch.Tensor`):
+                The original samples.
+            noise (`torch.Tensor`):
+                The noise to be added.
+            timesteps (`torch.Tensor`):
+                The timesteps.
+
+        Returns:
+            `torch.Tensor`:
+                The noisy samples.
+        """
         timesteps = timesteps.to(original_samples.device)
         sigmas = self.sigmas.to(original_samples.device)[timesteps]
         sigmas = rearrange(sigmas, "b -> b 1 1 1")

diff --git a/src/ncsn/unet/unet_2d_ncsn.py b/src/ncsn/unet/unet_2d_ncsn.py
index 5a8ec82..e8a5c8c 100644
--- a/src/ncsn/unet/unet_2d_ncsn.py
+++ b/src/ncsn/unet/unet_2d_ncsn.py
@@ -8,6 +8,59 @@
 
 
 class UNet2DModelForNCSN(UNet2DModel, ModelMixin, ConfigMixin):  # type: ignore[misc]
+    r"""A 2D UNet model for Noise Conditional Score Networks (NCSN).
+
+    This model inherits from [`diffusers.UNet2DModel`], which is a 2D UNet model that takes a noisy sample and a timestep and returns a sample shaped output. This model also inherits from [`diffusers.models.modeling_utils.ModelMixin`]. Check the superclass documentation for its generic methods implemented for all models (such as downloading or saving).
+
+    Args:
+        sigma_min (`float`): Minimum standard deviation for the isotropic Gaussian noise.
+        sigma_max (`float`): Maximum standard deviation for the isotropic Gaussian noise.
+        sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`):
+            Height and width of input/output sample. Dimensions must be a multiple of `2 ** (len(block_out_channels) -
+            1)`.
+        in_channels (`int`, *optional*, defaults to 3): Number of channels in the input sample.
+        out_channels (`int`, *optional*, defaults to 3): Number of channels in the output.
+        center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample.
+        time_embedding_type (`str`, *optional*, defaults to `"positional"`): Type of time embedding to use.
+        freq_shift (`int`, *optional*, defaults to 0): Frequency shift for Fourier time embedding.
+        flip_sin_to_cos (`bool`, *optional*, defaults to `True`):
+            Whether to flip sin to cos for Fourier time embedding.
+        down_block_types (`Tuple[str]`, *optional*, defaults to `("DownBlock2D", "AttnDownBlock2D", "AttnDownBlock2D", "AttnDownBlock2D")`):
+            Tuple of downsample block types.
+        mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2D"`):
+            Block type for the middle of the UNet; it can be either `UNetMidBlock2D` or `UnCLIPUNetMidBlock2D`.
+        up_block_types (`Tuple[str]`, *optional*, defaults to `("AttnUpBlock2D", "AttnUpBlock2D", "AttnUpBlock2D", "UpBlock2D")`):
+            Tuple of upsample block types.
+        block_out_channels (`Tuple[int]`, *optional*, defaults to `(224, 448, 672, 896)`):
+            Tuple of block output channels.
+        layers_per_block (`int`, *optional*, defaults to `2`): The number of layers per block.
+        mid_block_scale_factor (`float`, *optional*, defaults to `1`): The scale factor for the mid block.
+        downsample_padding (`int`, *optional*, defaults to `1`): The padding for the downsample convolution.
+        downsample_type (`str`, *optional*, defaults to `conv`):
+            The downsample type for downsampling layers. Choose between "conv" and "resnet".
+        upsample_type (`str`, *optional*, defaults to `conv`):
+            The upsample type for upsampling layers. Choose between "conv" and "resnet".
+        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.
+        act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use.
+        attention_head_dim (`int`, *optional*, defaults to `8`): The attention head dimension.
+        norm_num_groups (`int`, *optional*, defaults to `32`): The number of groups for normalization.
+        attn_norm_num_groups (`int`, *optional*, defaults to `None`):
+            If set to an integer, a group norm layer will be created in the mid block's [`Attention`] layer with the
+            given number of groups. If left as `None`, the group norm layer will only be created if
+            `resnet_time_scale_shift` is set to `default`, and if created will have `norm_num_groups` groups.
+        norm_eps (`float`, *optional*, defaults to `1e-5`): The epsilon for normalization.
+        resnet_time_scale_shift (`str`, *optional*, defaults to `"default"`): Time scale shift config
+            for ResNet blocks (see [`~models.resnet.ResnetBlock2D`]). Choose from `default` or `scale_shift`.
+        class_embed_type (`str`, *optional*, defaults to `None`):
+            The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`,
+            `"timestep"`, or `"identity"`.
+        num_class_embeds (`int`, *optional*, defaults to `None`):
+            Input dimension of the learnable embedding matrix to be projected to `time_embed_dim` when performing class
+            conditioning with `class_embed_type` equal to `None`.
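+
+    Example (a minimal construction sketch; the hyperparameter values below are illustrative
+    assumptions, e.g. 28x28 grayscale images a la MNIST, not the repository's defaults):
+
+    ```python
+    from ncsn.unet import UNet2DModelForNCSN
+
+    unet = UNet2DModelForNCSN(
+        num_train_timesteps=10,  # number of noise levels (illustrative)
+        sigma_min=0.01,          # smallest noise scale (illustrative)
+        sigma_max=1.0,           # largest noise scale (illustrative)
+        sample_size=28,
+        in_channels=1,
+        out_channels=1,
+    )
+    ```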
+    """
+
+    sigmas: torch.Tensor
+
     @register_to_config
     def __init__(
         self,
@@ -86,4 +139,4 @@ def __init__(
                 steps=num_train_timesteps,
             )
         )
-        self.register_buffer("sigmas", sigmas)  # type: ignore
+        self.register_buffer("sigmas", sigmas)

From debe75240e20f74ff8e784126cb2f5be87cb3ae9 Mon Sep 17 00:00:00 2001
From: Shunsuke KITADA
Date: Wed, 8 Jan 2025 16:12:26 +0900
Subject: [PATCH 3/9] update for typehint

---
 train_mnist.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/train_mnist.py b/train_mnist.py
index eecf32d..7969c84 100644
--- a/train_mnist.py
+++ b/train_mnist.py
@@ -188,7 +188,8 @@ def train_iteration(
     # Calculate the score using the model
     scores = unet(x_noisy, t).sample  # type: ignore
     # Calculate the target score
-    used_sigmas = unet.sigmas[t]  # type: ignore
+
+    used_sigmas = unet.sigmas[t]
     used_sigmas = rearrange(used_sigmas, "b -> b 1 1 1")
     target = -1 / used_sigmas * z
     # Rearrange the tensors

From aeaa9fc5046f320e07ddeef2f73d4de82288b993 Mon Sep 17 00:00:00 2001
From: Shunsuke KITADA
Date: Wed, 8 Jan 2025 16:12:50 +0900
Subject: [PATCH 4/9] add files for docs

---
 docs/Makefile                                | 20 +++++
 docs/make.bat                                | 35 +++++++++
 docs/source/api/pipeline/index.md            |  8 ++
 docs/source/api/pipeline/pipeline-ncsn.md    | 16 ++++
 docs/source/api/scheduler/index.md           |  8 ++
 docs/source/api/scheduler/scheduling-ncsn.md | 21 ++++++
 docs/source/api/unet/index.md                |  8 ++
 docs/source/api/unet/unet-2d-ncsn.md         | 15 ++++
 docs/source/conf.py                          | 78 ++++++++++++++++++++
 docs/source/index.md                         | 12 +++
 10 files changed, 221 insertions(+)
 create mode 100644 docs/Makefile
 create mode 100644 docs/make.bat
 create mode 100644 docs/source/api/pipeline/index.md
 create mode 100644 docs/source/api/pipeline/pipeline-ncsn.md
 create mode 100644 docs/source/api/scheduler/index.md
 create mode 100644 docs/source/api/scheduler/scheduling-ncsn.md
 create mode 100644 docs/source/api/unet/index.md
 create mode 100644 docs/source/api/unet/unet-2d-ncsn.md
 create mode 100644 docs/source/conf.py
 create mode 100644 docs/source/index.md

diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d0c3cbf
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = source
+BUILDDIR      = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..747ffb7
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.https://www.sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd

diff --git a/docs/source/api/pipeline/index.md b/docs/source/api/pipeline/index.md
new file mode 100644
index 0000000..96dcfa9
--- /dev/null
+++ b/docs/source/api/pipeline/index.md
@@ -0,0 +1,8 @@
+# Pipeline
+
+```{toctree}
+:maxdepth: 2
+:caption: Pipeline:
+
+pipeline-ncsn
+```

diff --git a/docs/source/api/pipeline/pipeline-ncsn.md b/docs/source/api/pipeline/pipeline-ncsn.md
new file mode 100644
index 0000000..0158656
--- /dev/null
+++ b/docs/source/api/pipeline/pipeline-ncsn.md
@@ -0,0 +1,16 @@
+# Pipeline for NCSN
+
+`NCSNPipeline` is a pipeline for training and inference of Noise Conditional Score Networks (NCSN) proposed by Yang Song and Stefano Ermon in the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600). The pipeline is designed to be used with the
+`UNet2DModelForNCSN` model and the `AnnealedLangevinDynamicsScheduler` scheduler.
+
+The abstract of the paper is the following:
+
+> We introduce a new generative model where samples are produced via Langevin dynamics using gradients of the data distribution estimated with score matching. Because gradients can be ill-defined and hard to estimate when the data resides on low-dimensional manifolds, we perturb the data with different levels of Gaussian noise, and jointly estimate the corresponding scores, i.e., the vector fields of gradients of the perturbed data distribution for all noise levels. For sampling, we propose an annealed Langevin dynamics where we use gradients corresponding to gradually decreasing noise levels as the sampling process gets closer to the data manifold. Our framework allows flexible model architectures, requires no sampling during training or the use of adversarial methods, and provides a learning objective that can be used for principled model comparisons. Our models produce samples comparable to GANs on MNIST, CelebA and CIFAR-10 datasets, achieving a new state-of-the-art inception score of 8.87 on CIFAR-10. Additionally, we demonstrate that our models learn effective representations via image inpainting experiments.
+
+## NCSNPipeline
+
+```{eval-rst}
+.. autoclass:: ncsn.pipeline_ncsn.NCSNPipeline
+    :members:
+    :special-members:
+```

diff --git a/docs/source/api/scheduler/index.md b/docs/source/api/scheduler/index.md
new file mode 100644
index 0000000..f319193
--- /dev/null
+++ b/docs/source/api/scheduler/index.md
@@ -0,0 +1,8 @@
+# Scheduler
+
+```{toctree}
+:maxdepth: 2
+:caption: Scheduler:
+
+scheduling-ncsn
+```

diff --git a/docs/source/api/scheduler/scheduling-ncsn.md b/docs/source/api/scheduler/scheduling-ncsn.md
new file mode 100644
index 0000000..3ec6c29
--- /dev/null
+++ b/docs/source/api/scheduler/scheduling-ncsn.md
@@ -0,0 +1,21 @@
+# Annealed Langevin Dynamics Scheduler
+
+`AnnealedLangevinDynamicsScheduler` is a scheduler that uses Langevin dynamics to sample from the data distribution whose score is estimated by the model. The scheduler anneals the noise level of the Langevin dynamics over time, starting from a high noise level and gradually decreasing it to a low one. The scheduler is based on the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600) by Yang Song and Stefano Ermon of the Stanford AI Lab.
+
+The abstract of the paper is the following:
+> We introduce a new generative model where samples are produced via Langevin dynamics using gradients of the data distribution estimated with score matching. Because gradients can be ill-defined and hard to estimate when the data resides on low-dimensional manifolds, we perturb the data with different levels of Gaussian noise, and jointly estimate the corresponding scores, i.e., the vector fields of gradients of the perturbed data distribution for all noise levels. For sampling, we propose an annealed Langevin dynamics where we use gradients corresponding to gradually decreasing noise levels as the sampling process gets closer to the data manifold. Our framework allows flexible model architectures, requires no sampling during training or the use of adversarial methods, and provides a learning objective that can be used for principled model comparisons. Our models produce samples comparable to GANs on MNIST, CelebA and CIFAR-10 datasets, achieving a new state-of-the-art inception score of 8.87 on CIFAR-10. Additionally, we demonstrate that our models learn effective representations via image inpainting experiments.
+
+## AnnealedLangevinDynamicsScheduler
+
+```{eval-rst}
+.. autoclass:: ncsn.scheduler.AnnealedLangevinDynamicsScheduler
+    :members:
+    :special-members:
+```
+
+## AnnealedLangevinDynamicsOutput
+
+```{eval-rst}
+.. autoclass:: ncsn.scheduler.AnnealedLangevinDynamicsOutput
+    :members:
+```

diff --git a/docs/source/api/unet/index.md b/docs/source/api/unet/index.md
new file mode 100644
index 0000000..8ccb36f
--- /dev/null
+++ b/docs/source/api/unet/index.md
@@ -0,0 +1,8 @@
+# UNet
+
+```{toctree}
+:maxdepth: 2
+:caption: UNet:
+
+unet-2d-ncsn
+```

diff --git a/docs/source/api/unet/unet-2d-ncsn.md b/docs/source/api/unet/unet-2d-ncsn.md
new file mode 100644
index 0000000..40a9a4e
--- /dev/null
+++ b/docs/source/api/unet/unet-2d-ncsn.md
@@ -0,0 +1,15 @@
+# UNet2DModel for NCSN
+
+`UNet2DModelForNCSN` is a 2D U-Net model suitable for Noise Conditional Score Networks (NCSN) proposed by Yang Song and Stefano Ermon in the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600). The model inherits from Diffusers' `UNet2DModel` and is designed to be used with the `AnnealedLangevinDynamicsScheduler` scheduler.
+
+The abstract of the paper is the following:
+
+> We introduce a new generative model where samples are produced via Langevin dynamics using gradients of the data distribution estimated with score matching. Because gradients can be ill-defined and hard to estimate when the data resides on low-dimensional manifolds, we perturb the data with different levels of Gaussian noise, and jointly estimate the corresponding scores, i.e., the vector fields of gradients of the perturbed data distribution for all noise levels. For sampling, we propose an annealed Langevin dynamics where we use gradients corresponding to gradually decreasing noise levels as the sampling process gets closer to the data manifold. Our framework allows flexible model architectures, requires no sampling during training or the use of adversarial methods, and provides a learning objective that can be used for principled model comparisons. Our models produce samples comparable to GANs on MNIST, CelebA and CIFAR-10 datasets, achieving a new state-of-the-art inception score of 8.87 on CIFAR-10. Additionally, we demonstrate that our models learn effective representations via image inpainting experiments.
+
+## UNet2DModelForNCSN
+
+```{eval-rst}
+.. autoclass:: ncsn.unet.UNet2DModelForNCSN
+    :members:
+    :special-members:
+```

diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..0f6235b
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,78 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html

+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+import os
+import sys
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+
+sys.path.insert(0, os.path.abspath("../../src"))
+
+from datetime import datetime
+
+from sphinx_pyproject import SphinxConfig
+
+config = SphinxConfig(pyproject_file="../../pyproject.toml")
+
+
+project = config.name
+copyright = f"{datetime.today().year}, {config.author}"
+
+author = config.author
+release = config.version
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.napoleon",
+    "myst_parser",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.viewcode",
+    "sphinx_autodoc_typehints",
+]
+
+templates_path = ["_templates"]
+exclude_patterns = ["build"]
+
+source_suffix = [".rst", ".md"]
+
+# -- Extension configuration -------------------------------------------------
+
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+    "rich": ("https://rich.readthedocs.io/en/latest", None),
+    "torch": ("https://pytorch.org/docs/stable", None),
+    "flax": ("https://flax.readthedocs.io/en/latest", None),
+    "fairscale": ("https://fairscale.readthedocs.io/en/latest/", None),
+    "datasets": ("https://huggingface.co/docs/datasets/master/en", None),
+    "transformers": ("https://huggingface.co/docs/transformers/master/en", None),
+    "beaker": ("https://beaker-py.readthedocs.io/en/latest/", None),
+}
+
+# Tell myst-parser to assign header anchors for h1-h3.
+myst_heading_anchors = 3
+
+# By default, sort documented members by type within classes and modules.
+autodoc_member_order = "groupwise"
+
+python_use_unqualified_type_names = True
+
+# Include default values when documenting parameter types.
+typehints_defaults = "comma"
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = "furo"
+html_static_path = ["_static"]

diff --git a/docs/source/index.md b/docs/source/index.md
new file mode 100644
index 0000000..c39b97f
--- /dev/null
+++ b/docs/source/index.md
@@ -0,0 +1,12 @@
+```{include} ../../README.md
+```
+
+```{toctree}
+:maxdepth: 2
+:hidden:
+:caption: API Reference:
+
+api/unet/index
+api/scheduler/index
+api/pipeline/index
+```

From 9c613467242bfd78d0cbf4ac49f1f9f7e586444f Mon Sep 17 00:00:00 2001
From: Shunsuke KITADA
Date: Wed, 8 Jan 2025 16:13:47 +0900
Subject: [PATCH 5/9] update .gitignore

---
 .gitignore | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitignore b/.gitignore
index 3ea405e..2c8ae31 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,9 @@
 # Created by https://www.toptal.com/developers/gitignore/api/python
 # Edit at https://www.toptal.com/developers/gitignore?templates=python
 
+*.bk
 *.png
+*.gif
 *.json
 
 *.safetensors

From 94737e21a14a08b32b83fae6f749fa7d5be32d2c Mon Sep 17 00:00:00 2001
From: Shunsuke KITADA
Date: Wed, 8 Jan 2025 17:09:55 +0900
Subject: [PATCH 6/9] update

---
 docs/source/api/pipeline/pipeline-ncsn.md    |  4 ++--
 docs/source/api/scheduler/scheduling-ncsn.md |  2 +-
 docs/source/api/unet/unet-2d-ncsn.md         |  2 +-
 docs/source/conf.py                          |  6 +-----
 src/ncsn/pipeline_ncsn.py                    | 17 ++++++++---------
 src/ncsn/scheduler/scheduling_ncsn.py        |  2 +-
 src/ncsn/unet/unet_2d_ncsn.py                |  4 ++--
 7 files changed, 16 insertions(+), 21 deletions(-)

diff --git a/docs/source/api/pipeline/pipeline-ncsn.md b/docs/source/api/pipeline/pipeline-ncsn.md
index 0158656..bb760a6 100644
--- a/docs/source/api/pipeline/pipeline-ncsn.md
+++ b/docs/source/api/pipeline/pipeline-ncsn.md
@@ -1,7 +1,7 @@
 # Pipeline for NCSN
 
-`NCSNPipeline` is a pipeline for training and inference of Noise Conditional Score Networks (NCSN) proposed by Yang Song and Stefano Ermon in the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600). The pipeline is designed to be used with the
-`UNet2DModelForNCSN` model and the `AnnealedLangevinDynamicsScheduler` scheduler.
+{py:class}`~ncsn.pipeline_ncsn.NCSNPipeline` is a pipeline for training and inference of Noise Conditional Score Networks (NCSN) proposed by Yang Song and Stefano Ermon in the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600). The pipeline is designed to be used with the
+{py:class}`~ncsn.unet.UNet2DModelForNCSN` model and the {py:class}`~ncsn.scheduler.AnnealedLangevinDynamicsScheduler` scheduler.
 
 The abstract of the paper is the following:

diff --git a/docs/source/api/scheduler/scheduling-ncsn.md b/docs/source/api/scheduler/scheduling-ncsn.md
index 3ec6c29..0b134fe 100644
--- a/docs/source/api/scheduler/scheduling-ncsn.md
+++ b/docs/source/api/scheduler/scheduling-ncsn.md
@@ -1,6 +1,6 @@
 # Annealed Langevin Dynamics Scheduler
 
-`AnnealedLangevinDynamicsScheduler` is a scheduler that uses Langevin dynamics to sample from the data distribution whose score is estimated by the model. The scheduler anneals the noise level of the Langevin dynamics over time, starting from a high noise level and gradually decreasing it to a low one. The scheduler is based on the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600) by Yang Song and Stefano Ermon of the Stanford AI Lab.
+{py:class}`~ncsn.scheduler.AnnealedLangevinDynamicsScheduler` is a scheduler that uses Langevin dynamics to sample from the data distribution whose score is estimated by the model. The scheduler anneals the noise level of the Langevin dynamics over time, starting from a high noise level and gradually decreasing it to a low one. The scheduler is based on the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600) by Yang Song and Stefano Ermon of the Stanford AI Lab.
 
 The abstract of the paper is the following:
 > We introduce a new generative model where samples are produced via Langevin dynamics using gradients of the data distribution estimated with score matching. Because gradients can be ill-defined and hard to estimate when the data resides on low-dimensional manifolds, we perturb the data with different levels of Gaussian noise, and jointly estimate the corresponding scores, i.e., the vector fields of gradients of the perturbed data distribution for all noise levels. For sampling, we propose an annealed Langevin dynamics where we use gradients corresponding to gradually decreasing noise levels as the sampling process gets closer to the data manifold. Our framework allows flexible model architectures, requires no sampling during training or the use of adversarial methods, and provides a learning objective that can be used for principled model comparisons. Our models produce samples comparable to GANs on MNIST, CelebA and CIFAR-10 datasets, achieving a new state-of-the-art inception score of 8.87 on CIFAR-10. Additionally, we demonstrate that our models learn effective representations via image inpainting experiments.

diff --git a/docs/source/api/unet/unet-2d-ncsn.md b/docs/source/api/unet/unet-2d-ncsn.md
index 40a9a4e..25687db 100644
--- a/docs/source/api/unet/unet-2d-ncsn.md
+++ b/docs/source/api/unet/unet-2d-ncsn.md
@@ -1,6 +1,6 @@
 # UNet2DModel for NCSN
 
-`UNet2DModelForNCSN` is a 2D U-Net model suitable for Noise Conditional Score Networks (NCSN) proposed by Yang Song and Stefano Ermon in the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600). The model inherits from Diffusers' `UNet2DModel` and is designed to be used with the `AnnealedLangevinDynamicsScheduler` scheduler.
+{py:class}`~ncsn.unet.UNet2DModelForNCSN` is a 2D U-Net model suitable for Noise Conditional Score Networks (NCSN) proposed by Yang Song and Stefano Ermon in the paper [Generative Modeling by Estimating Gradients of the Data Distribution](https://arxiv.org/abs/1907.05600). The model inherits from Diffusers' {py:class}`~diffusers.UNet2DModel` and is designed to be used with the {py:class}`~ncsn.scheduler.AnnealedLangevinDynamicsScheduler` scheduler.
The abstract of the paper is the following: diff --git a/docs/source/conf.py b/docs/source/conf.py index 0f6235b..1ca64c0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -51,13 +51,9 @@ intersphinx_mapping = { "python": ("https://docs.python.org/3", None), - "rich": ("https://rich.readthedocs.io/en/latest", None), "torch": ("https://pytorch.org/docs/stable", None), - "flax": ("https://flax.readthedocs.io/en/latest", None), - "fairscale": ("https://fairscale.readthedocs.io/en/latest/", None), - "datasets": ("https://huggingface.co/docs/datasets/master/en", None), "transformers": ("https://huggingface.co/docs/transformers/master/en", None), - "beaker": ("https://beaker-py.readthedocs.io/en/latest/", None), + "diffusers": ("https://huggingface.co/docs/diffusers/main/en", None), } # Tell myst-parser to assign header anchors for h1-h3. diff --git a/src/ncsn/pipeline_ncsn.py b/src/ncsn/pipeline_ncsn.py index 09036ee..0008e7b 100644 --- a/src/ncsn/pipeline_ncsn.py +++ b/src/ncsn/pipeline_ncsn.py @@ -38,13 +38,13 @@ class NCSNPipeline(DiffusionPipeline): r""" Pipeline for unconditional image generation using Noise Conditional Score Network (NCSN). - This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods + This model inherits from :py:class:`~diffusers.DiffusionPipeline`. Check the superclass documentation for the generic methods implemented for all pipelines (downloading, saving, running on a particular device, etc.). Parameters: - unet ([`UNet2DModelForNCSN`]): + unet (:py:class:`~ncsn.unet.UNet2DModelForNCSN`): A `UNet2DModelForNCSN` to estimate the score of the image. - scheduler ([`AnnealedLangevinDynamicsScheduler`]): + scheduler (:py:class:`~ncsn.scheduler.AnnealedLangevinDynamicsScheduler`): A `AnnealedLangevinDynamicsScheduler` to be used in combination with `unet` to estimate the score of the image. """ @@ -63,11 +63,11 @@ def decode_samples(self, samples: torch.Tensor) -> torch.Tensor: r"""Decodes the generated samples to the correct format suitable for images. Args: - samples (torch.Tensor): + samples (:py:class:`torch.Tensor`): The generated samples to decode. Returns: - torch.Tensor: The decoded samples. + :py:class:`torch.Tensor`: The decoded samples. """ # Normalize the generated image samples = normalize_images(samples) @@ -102,8 +102,7 @@ def __call__( num_inference_steps (`int`, *optional*, defaults to 10): The number of inference steps. generator (`torch.Generator`, `optional`): - A [`torch.Generator`](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make - generation deterministic. + A :py:class:`torch.Generator` to make generation deterministic. output_type (`str`, `optional`, defaults to `"pil"`): The output format of the generated image. Choose between `PIL.Image` or `np.array`. return_dict (`bool`, *optional*, defaults to `True`): @@ -119,8 +118,8 @@ def __call__( `._callback_tensor_inputs` attribute of your pipeline class. Returns: - `diffusers.pipelines.ImagePipelineOutput` or `tuple`: - If `return_dict` is `True`, `diffusers.pipelines.ImagePipelineOutput` is returned, otherwise a `tuple` is + :py:class:`diffusers.ImagePipelineOutput` or `tuple`: + If `return_dict` is `True`, :py:class:`diffusers.ImagePipelineOutput` is returned, otherwise a `tuple` is returned where the first element is a list with the generated images. 
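+
+        Example (a minimal sketch; the checkpoint path below is a placeholder for a locally trained
+        pipeline, not a published model):
+
+        ```python
+        >>> from ncsn import NCSNPipeline
+
+        >>> # "path/to/ncsn-mnist" is hypothetical; point it at your own trained pipeline.
+        >>> pipe = NCSNPipeline.from_pretrained("path/to/ncsn-mnist")
+        >>> output = pipe(num_inference_steps=10)
+        >>> image = output.images[0]  # a PIL image when output_type="pil"
+        ```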
""" callback_on_step_end_tensor_inputs = ( diff --git a/src/ncsn/scheduler/scheduling_ncsn.py b/src/ncsn/scheduler/scheduling_ncsn.py index 9581e83..9225742 100644 --- a/src/ncsn/scheduler/scheduling_ncsn.py +++ b/src/ncsn/scheduler/scheduling_ncsn.py @@ -22,7 +22,7 @@ class AnnealedLangevinDynamicsOutput(SchedulerOutput): class AnnealedLangevinDynamicsScheduler(SchedulerMixin, ConfigMixin): # type: ignore r"""Annealed Langevin Dynamics scheduler for Noise Conditional Score Networks (NCSN). - This scheduler inherits from [`SchedulerMixin`]. Check the superclass documentation for it's generic methods implemented for all schedulers (such as downloading or saving). + This scheduler inherits from :py:class:`~diffusers.SchedulerMixin`. Check the superclass documentation for it's generic methods implemented for all schedulers (such as downloading or saving). Args: num_train_timesteps (`int`): Number of training timesteps. diff --git a/src/ncsn/unet/unet_2d_ncsn.py b/src/ncsn/unet/unet_2d_ncsn.py index e8a5c8c..7542ac1 100644 --- a/src/ncsn/unet/unet_2d_ncsn.py +++ b/src/ncsn/unet/unet_2d_ncsn.py @@ -10,7 +10,7 @@ class UNet2DModelForNCSN(UNet2DModel, ModelMixin, ConfigMixin): # type: ignore[misc] r"""A 2D UNet model for Noise Conditional Score Networks (NCSN). - This model inherits from [`diffusers.UNet2DModel`], which is a 2D UNet model that takes a noisy sample and a timestep and returns a sample shaped output. This model also inherits from [`diffusers.models.modeling_utils.ModelMixin`]. Check the superclass documentation for it's generic methods implemented for all models (such as downloading or saving). + This model inherits from :py:class:`~diffusers.UNet2DModel`, which is a 2D UNet model that takes a noisy sample and a timestep and returns a sample shaped output. This model also inherits from :py:class:`~diffusers.ModelMixin`. Check the superclass documentation for it's generic methods implemented for all models (such as downloading or saving). Args: sigma_min (`float`): Minimum standard deviation for the isotropic Gaussian noise. @@ -50,7 +50,7 @@ class UNet2DModelForNCSN(UNet2DModel, ModelMixin, ConfigMixin): # type: ignore[ `resnet_time_scale_shift` is set to `default`, and if created will have `norm_num_groups` groups. norm_eps (`float`, *optional*, defaults to `1e-5`): The epsilon for normalization. resnet_time_scale_shift (`str`, *optional*, defaults to `"default"`): Time scale shift config - for ResNet blocks (see [`~models.resnet.ResnetBlock2D`]). Choose from `default` or `scale_shift`. + for ResNet blocks (see :py:class:`~diffusers.ResnetBlock2D`). Choose from `default` or `scale_shift`. class_embed_type (`str`, *optional*, defaults to `None`): The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`, `"timestep"`, or `"identity"`. 
From 9bd3ad822c8013a4b2992d70823c0102a6f44e21 Mon Sep 17 00:00:00 2001 From: Shunsuke KITADA Date: Wed, 8 Jan 2025 17:43:39 +0900 Subject: [PATCH 7/9] update for CI --- .github/workflows/gh-pages.yaml | 46 +++++++++++++++++++++++++++++++++ docs/source/conf.py | 1 + 2 files changed, 47 insertions(+) create mode 100644 .github/workflows/gh-pages.yaml diff --git a/.github/workflows/gh-pages.yaml b/.github/workflows/gh-pages.yaml new file mode 100644 index 0000000..1967f23 --- /dev/null +++ b/.github/workflows/gh-pages.yaml @@ -0,0 +1,46 @@ +name: Document + +on: push + +# Allow one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Typo check + uses: crate-ci/typos@v1.29.0 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Setup Python + run: uv python install + + - name: Install the project + run: uv sync --all-extras --dev + + - name: Build HTML + run: uv run sphinx-build -M html docs/source docs/build + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: html-docs + path: docs/build/html + + - name: Deploy + uses: peaceiris/actions-gh-pages@v4 + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./docs/build/html + publish_branch: gh-pages diff --git a/docs/source/conf.py b/docs/source/conf.py index 1ca64c0..5a4a430 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -72,3 +72,4 @@ html_theme = "furo" html_static_path = ["_static"] +html_favicon = "https://huggingface.co/front/assets/huggingface_logo-noborder.svg" From 5d85f80f4b98eac2243eccf5af1b04f7915b5cc5 Mon Sep 17 00:00:00 2001 From: Shunsuke KITADA Date: Wed, 8 Jan 2025 17:53:41 +0900 Subject: [PATCH 8/9] update --- .github/workflows/gh-pages.yaml | 24 ++++++++++-------------- Makefile | 16 ++++++++++++---- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/.github/workflows/gh-pages.yaml b/.github/workflows/gh-pages.yaml index 1967f23..4a9d200 100644 --- a/.github/workflows/gh-pages.yaml +++ b/.github/workflows/gh-pages.yaml @@ -19,23 +19,19 @@ jobs: - name: Typo check uses: crate-ci/typos@v1.29.0 - - name: Install uv - uses: astral-sh/setup-uv@v5 - - - name: Setup Python - run: uv python install + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" - - name: Install the project - run: uv sync --all-extras --dev + - name: Install dependencies + run: | + make setup + make install-doc - name: Build HTML - run: uv run sphinx-build -M html docs/source docs/build - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: html-docs - path: docs/build/html + run: | + make html - name: Deploy uses: peaceiris/actions-gh-pages@v4 diff --git a/Makefile b/Makefile index d35dfdb..fe98315 100644 --- a/Makefile +++ b/Makefile @@ -6,20 +6,24 @@ setup: pip install -U uv -.PHONY: install -install: +.PHONY: install-training +install-training: uv sync --extra training +.PHONY: install-doc +install-doc: + uv sync --extra doc + # # linter/formatter/typecheck # .PHONY: lint -lint: install +lint: install-training uv run ruff check --output-format=github . .PHONY: format -format: install +format: install-training uv run ruff format --check --diff . .PHONY: typecheck -typecheck: install +typecheck: install-training uv run mypy --cache-dir=/dev/null .
.PHONY: test -test: install +test: install-training uv run pytest -vsx --log-cli-level=INFO + +.PHONY: html +html: install-doc + uv run sphinx-build -M html docs/source docs/build From bc4fb3f3722747e337d02d21795a2a2748ad29bf Mon Sep 17 00:00:00 2001 From: Shunsuke KITADA Date: Wed, 8 Jan 2025 17:56:22 +0900 Subject: [PATCH 9/9] update README.md --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 3214ee9..61fe63b 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # 🤗 Noise Conditional Score Networks [![CI](https://github.com/py-img-gen/diffusers-ncsn/actions/workflows/ci.yaml/badge.svg)](https://github.com/py-img-gen/diffusers-ncsn/actions/workflows/ci.yaml) -[![](https://img.shields.io/badge/Official_code-GitHub-green)](https://github.com/ermongroup/ncsn) +[![Document](https://github.com/py-img-gen/diffusers-ncsn/actions/workflows/gh-pages.yaml/badge.svg)](https://github.com/py-img-gen/diffusers-ncsn/actions/workflows/gh-pages.yaml) +[![ermongroup/ncsn](https://img.shields.io/badge/Official_code-GitHub-green)](https://github.com/ermongroup/ncsn) [![Model on HF](https://img.shields.io/badge/🤗%20Model%20on%20HF-py--img--gen/ncsn--mnist-D4AA00)](https://huggingface.co/py-img-gen/ncsn-mnist) [`🤗 diffusers`](https://github.com/huggingface/diffusers) implementation of the paper ["Generative Modeling by Estimating Gradients of the Data Distribution" [Yang+ NeurIPS'19]](https://arxiv.org/abs/1907.05600).
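To tie the series together, here is a hedged end-to-end sketch of sampling with the pipeline documented above, using the Hub checkpoint the README badge points to. `from_pretrained` is inherited from `DiffusionPipeline`; the checkpoint layout and the `images` attribute of the output are assumed to follow standard diffusers conventions rather than verified here:

```python
import torch

from ncsn.pipeline_ncsn import NCSNPipeline

# Assumed checkpoint id, taken from the README badge; loading relies on the
# standard DiffusionPipeline.from_pretrained mechanism.
pipe = NCSNPipeline.from_pretrained("py-img-gen/ncsn-mnist")

output = pipe(
    num_inference_steps=10,           # documented default of __call__
    generator=torch.manual_seed(19),  # documented way to make sampling deterministic
)
# output_type defaults to "pil" per the docstring, so entries are PIL images.
output.images[0].save("ncsn-mnist-sample.png")
```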