# SwinFace/utils/paraProcess.py
# (pasted header residue preserved: commit 391f8df, "add injection")
"""
处理参数,如参数翻转、参数替换等等
"""
import os
import torch
import random
import struct
import pandas as pd
from bitstring import BitArray
from concurrent.futures import ThreadPoolExecutor
import numpy as np
import math
def split_file(file_path, chunk_size=8):
    """
    Split a source file into a list of BitArray chunks.

    :param file_path: path of the source file
    :param chunk_size: chunk granularity in bits
    :return: list of BitArray chunks (last chunk may be shorter)
    """
    # Read the whole file as a bit stream.
    raw_bits = BitArray(filename=file_path)
    total_bits = len(raw_bits)
    pieces = []
    offset = 0
    while offset < total_bits:
        pieces.append(raw_bits[offset:offset + chunk_size])
        offset += chunk_size
    return pieces
def merge_file(output_file, chunks):
    """
    Concatenate a list of BitArray chunks and write them to a file.

    :param output_file: destination file path
    :param chunks: list of BitArray chunks to concatenate
    :return: None (the merged data is written to output_file)
    """
    combined = BitArray()
    for piece in chunks:
        combined += piece  # in-place append of each chunk's bits
    with open(output_file, 'wb') as out_fh:
        combined.tofile(out_fh)
def layer_low_n_bit_fLip(initParaPath, flipParaPath, bit_n, *layers):
    """
    Flip the low `bit_n` bits of every float32 parameter in the given layers
    of a .pth checkpoint, then save the modified checkpoint.

    :param initParaPath: path to the original parameter file (.pth)
    :param flipParaPath: path where the flipped parameters are written
    :param bit_n: number of low-order bits to flip in each parameter value
    :param layers: names of the state-dict entries to modify
    :return: None
    """
    para = torch.load(initParaPath)
    # XOR mask with the low bit_n bits set — XOR-ing flips exactly those bits.
    # BUGFIX: the original code XOR-ed with bit_n itself, which flips the bit
    # pattern of the *value* bit_n rather than the low bit_n bits (e.g. 3
    # flipped only bits 0-1 instead of bits 0-2 for bit_n == 3).
    mask = (1 << bit_n) - 1
    for layer in layers:  # every requested layer in the state dict
        if len(para[layer].data.shape) < 1:
            continue  # skip 0-dim (scalar) entries
        layerTensor = para[layer].data
        # Reinterpret the float32 storage as int32 so bitwise ops are legal.
        # NOTE(review): assumes the parameters are float32 — .view(torch.int32)
        # raises for other dtypes (e.g. float16); confirm against callers.
        layerTensor_initView = layerTensor.view(torch.int32)
        layerTensor_embedded_int = layerTensor_initView ^ mask
        layerTensor_embedded = layerTensor_embedded_int.view(torch.float32)
        para[layer].data = layerTensor_embedded
    torch.save(para, flipParaPath)
    return
if __name__ == "__main__":
print("Test Done")