
Commit eb04b42

fix insecure deserialization when calling torch.load() (#4202)
1 parent b2c640e commit eb04b42
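
For reference, a minimal sketch (not part of the diff) of what the extra keyword argument changes: a plain torch.load() unpickles the entire file, so a maliciously crafted checkpoint can execute arbitrary code at load time, whereas weights_only=True restricts deserialization to tensors and plain containers.

    import torch

    # torch.save serializes the checkpoint with Python's pickle module.
    torch.save({'weight': torch.zeros(2, 2)}, 'ckpt.pth')  # hypothetical path

    # Unsafe pattern this commit removes: full unpickling, which a crafted
    # .pth file can abuse to run arbitrary code during load.
    state = torch.load('ckpt.pth')

    # Safer pattern adopted across the repository: only tensors and
    # primitive containers are deserialized; other objects are rejected.
    state = torch.load('ckpt.pth', weights_only=True)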

File tree

lmdeploy/lite/apis/auto_awq.py
lmdeploy/lite/apis/get_small_sharded_hf.py
lmdeploy/lite/apis/kv_qparams.py
lmdeploy/lite/apis/smooth_quant.py
lmdeploy/turbomind/deploy/loader.py
lmdeploy/vl/model/utils.py

6 files changed: +5 lines, -146 lines


lmdeploy/lite/apis/auto_awq.py

Lines changed: 1 addition & 1 deletion
@@ -98,7 +98,7 @@ def auto_awq(model: str,
     layer_type = LAYER_TYPE_MAP[type(model).__name__]
     fc2fcs = FC_FCS_MAP[layer_type]
     norm2fcs = NORM_FCS_MAP[layer_type]
-    input_stats = torch.load(osp.join(work_dir, 'inputs_stats.pth'))
+    input_stats = torch.load(osp.join(work_dir, 'inputs_stats.pth'), weights_only=True)
     layers = collect_target_modules(model, layer_type)
     fcs = {}
     for l_name, layer in layers.items():

lmdeploy/lite/apis/get_small_sharded_hf.py

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ def main():
 
     checkpoints = set(index['weight_map'].values())
     for ckpt in checkpoints:
-        state_dict = torch.load(os.path.join(args.src_dir, ckpt), map_location='cuda')
+        state_dict = torch.load(os.path.join(args.src_dir, ckpt), map_location='cuda', weights_only=True)
         keys = sorted(list(state_dict.keys()))
         for k in keys:
             new_state_dict_name = 'pytorch_model-{:05d}-of-{:05d}.bin'.format(cnt, n_shard)

lmdeploy/lite/apis/kv_qparams.py

Lines changed: 0 additions & 141 deletions
This file was deleted.

lmdeploy/lite/apis/smooth_quant.py

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ def smooth_quant(model: str,
 
     # calibrate function exports the calibration statistics
     # (inputs, outputs, keys and values) to `work_dir`.
-    inp_stats = torch.load(work_dir / 'inputs_stats.pth')
+    inp_stats = torch.load(work_dir / 'inputs_stats.pth', weights_only=True)
     act_scales = inp_stats['absmax']
 
     model_type = type(model).__name__

lmdeploy/turbomind/deploy/loader.py

Lines changed: 1 addition & 1 deletion
@@ -119,7 +119,7 @@ def items(self):
         params = defaultdict(dict)
         for shard in self.shards:
             misc = {}
-            tmp = torch.load(shard, map_location='cpu')
+            tmp = torch.load(shard, map_location='cpu', weights_only=True)
             for k, v in tmp.items():
                 match = re.findall(self.pattern, k)
                 if not match:
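
One practical consequence, noted here as an editorial aside rather than part of the patch: weights_only=True only accepts shards whose entries are tensors or plain Python containers, so a checkpoint that pickles an arbitrary object now fails loudly instead of executing it. A small illustration with a hypothetical class:

    import torch

    class Custom:  # stand-in for any arbitrary pickled object
        pass

    torch.save({'obj': Custom()}, 'custom.pth')  # hypothetical file

    # The restricted unpickler refuses the custom object and raises an
    # UnpicklingError instead of importing attacker-controlled code.
    torch.load('custom.pth', weights_only=True)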

lmdeploy/vl/model/utils.py

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ def load_weight_ckpt(ckpt: str) -> Dict[str, torch.Tensor]:
     if ckpt.endswith('.safetensors'):
         return load_file(ckpt)
     else:
-        return torch.load(ckpt)
+        return torch.load(ckpt, weights_only=True)
 
 
 def get_used_weight_files(folder: str, state_dict: Dict[str, torch.Tensor]) -> List[str]:
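
A rough usage sketch of the patched helper (the file names are hypothetical): .safetensors checkpoints are parsed with the safetensors loader and never touch pickle, and the legacy fallback now goes through the restricted loader as well.

    from lmdeploy.vl.model.utils import load_weight_ckpt

    # .safetensors files are read by the safetensors loader, no pickle involved.
    weights = load_weight_ckpt('visual_encoder.safetensors')

    # Legacy pickle-based checkpoints are now loaded with weights_only=True.
    weights = load_weight_ckpt('visual_encoder.bin')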
