Skip to content

Commit 48f6493

Browse files
Release 0.17.0 (#2691)
- Bump versions - Fix a few TODO comments - A bit of clean up in test_target_parameters.py
1 parent 337be05 commit 48f6493

File tree

5 files changed

+11
-21
lines changed

5 files changed

+11
-21
lines changed

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from setuptools import find_packages, setup
1616

1717

18-
VERSION = "0.16.1.dev0"
18+
VERSION = "0.17.0"
1919

2020
extras = {}
2121
extras["quality"] = [

src/peft/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
__version__ = "0.16.1.dev0"
15+
__version__ = "0.17.0"
1616

1717
from .auto import (
1818
MODEL_TYPE_TO_PEFT_MODEL_MAPPING,

tests/test_custom_models.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1259,7 +1259,7 @@ def forward(self, X):
12591259

12601260

12611261
class _LinearUsingParameter(nn.Module):
1262-
# TODO
1262+
# Linear layer equivalent
12631263
def __init__(self, in_features, out_features, bias=None):
12641264
super().__init__()
12651265
self.in_features = in_features
@@ -1273,7 +1273,7 @@ def forward(self, x):
12731273

12741274

12751275
class MlpUsingParameters(nn.Module):
1276-
# TODO
1276+
# MLP that uses layers whose parameters need to be targeted with target_parameters
12771277
def __init__(self, bias=True):
12781278
super().__init__()
12791279

tests/test_gpu_examples.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3825,7 +3825,7 @@ def setUp(self):
38253825
# torchao breaks with fp16 and if a previous test uses fp16, transformers will set this env var, which affects
38263826
# subsequent tests, therefore the env var needs to be cleared explicitly
38273827
#
3828-
# TODO: remove this once https://github.com/huggingface/transformers/pull/37259 is merged
3828+
# TODO: remove this once https://github.com/huggingface/transformers/pull/39483 is merged
38293829
os.environ.pop("ACCELERATE_MIXED_PRECISION", None)
38303830

38313831
def tearDown(self):

tests/test_target_parameters.py

Lines changed: 6 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
import torch
1717
from transformers import AutoModelForCausalLM
1818

19-
from peft import LoraConfig, get_peft_model
19+
from peft import LoraConfig, TaskType, get_peft_model
2020

2121
from .testing_common import PeftCommonTester, hub_online_once
2222
from .testing_utils import set_init_weights_false
@@ -26,18 +26,14 @@
2626
"trl-internal-testing/tiny-Llama4ForCausalLM",
2727
]
2828

29-
# TODO Missing from this list are LoKr, LoHa, LN Tuning, add them
3029
ALL_CONFIGS = [
3130
# target down_proj
3231
(
3332
LoraConfig,
3433
{
35-
"task_type": "CAUSAL_LM",
36-
"r": 8,
37-
"lora_alpha": 32,
38-
"target_modules": None,
34+
"task_type": TaskType.CAUSAL_LM,
35+
"target_modules": [],
3936
"lora_dropout": 0.0,
40-
"bias": "none",
4137
"target_parameters": [
4238
"feed_forward.experts.down_proj",
4339
],
@@ -47,12 +43,9 @@
4743
(
4844
LoraConfig,
4945
{
50-
"task_type": "CAUSAL_LM",
51-
"r": 8,
52-
"lora_alpha": 32,
53-
"target_modules": None,
46+
"task_type": TaskType.CAUSAL_LM,
47+
"target_modules": [],
5448
"lora_dropout": 0.0,
55-
"bias": "none",
5649
"target_parameters": [
5750
"0.feed_forward.experts.gate_up_proj",
5851
"1.feed_forward.experts.down_proj",
@@ -79,12 +72,9 @@
7972
(
8073
LoraConfig,
8174
{
82-
"task_type": "CAUSAL_LM",
83-
"r": 8,
84-
"lora_alpha": 32,
75+
"task_type": TaskType.CAUSAL_LM,
8576
"target_modules": ["q_proj", "v_proj"],
8677
"lora_dropout": 0.0,
87-
"bias": "none",
8878
"target_parameters": [
8979
"feed_forward.experts.down_proj",
9080
],

0 commit comments

Comments
 (0)