# -*- coding: UTF-8 -*-
"""Get the prediction of a (mutated) model on a single Keras backend and save it locally."""
import sys
import os
sys.path.append("../")
import numpy as np
import pickle
import argparse
import configparser
import warnings
from scripts.tools.utils import DataUtils
from scripts.logger.lemon_logger import Logger

main_logger = Logger()


def custom_objects():
    """Custom activations needed to deserialize mutated models."""

    def no_activation(x):
        # Identity activation used by some mutation operators.
        return x

    def leakyrelu(x):
        # Import inside the function so the backend selected via KERAS_BACKEND is used.
        import keras.backend as K
        return K.relu(x, alpha=0.01)

    objects = {}
    objects['no_activation'] = no_activation
    objects['leakyrelu'] = leakyrelu
    return objects
def _get_prediction(bk, x, y, model_path, batch_size):
    """Get the prediction of the model on the given backend and save it as a .npy file.

    Relies on the module-level `flags`, `mut_model_name` and `keras` set up in __main__,
    so it must only be called after the backend has been selected and keras imported.
    """
    # test_x: test set; batch_size: number of samples fed to predict() at a time.
    test_x, test_y = x[:flags.test_size], y[:flags.test_size]
    predict_model = keras.models.load_model(model_path, custom_objects=custom_objects())
    # predict_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    main_logger.info("INFO: load model and compile done!")
    res = predict_model.predict(test_x, batch_size=batch_size)
    root_dir = model_path.split("origin_model")[0]

    # Path where the model prediction result is saved; line 44 of
    # patch_prediction_extractor.py must be changed to the same path.
    npy_path = root_dir + 'res.npy'
    # Save the prediction result locally.
    np.save(npy_path, res)
    main_logger.info("SUCCESS: Get prediction for {} successfully on {}!".format(mut_model_name, bk))
    """Store prediction result to redis"""
    # redis_conn.hset("prediction_{}".format(mut_model_name), bk, pickle.dumps(res))
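# Sketch of how the saved predictions might later be consumed for cross-backend
# comparison (assumption for illustration only; per the comment above, the actual
# consumer is patch_prediction_extractor.py, which is not shown here, and the paths
# below are hypothetical):
#   res_a = np.load("/path/to/run_with_backend_a/res.npy")   # prediction under backend A
#   res_b = np.load("/path/to/run_with_backend_b/res.npy")   # prediction under backend B
#   max_diff = np.max(np.abs(res_a - res_b))                  # simple inconsistency metric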
if __name__ == "__main__":
    """Parse command-line arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--backend", type=str, help="name of the backend")
    parser.add_argument("--exp", type=str, help="experiment identifier")
    parser.add_argument("--test_size", type=int, help="number of test images")
    parser.add_argument("--model", type=str, help="path of the model to predict")
    # parser.add_argument("--redis_db", type=int)
    parser.add_argument("--config_name", type=str)
    flags, unparsed = parser.parse_known_args(sys.argv[1:])

    """Load configuration"""
    warnings.filterwarnings("ignore")
    lemon_cfg = configparser.ConfigParser()
    # lemon_cfg.read(f"./config/{flags.config_name}")
    root_dir = flags.model.split("origin_model")[0]
    cfg_path = os.path.join(os.path.dirname(os.getcwd()), "config", flags.config_name)
    lemon_cfg.read(cfg_path)
    # pool = redis.ConnectionPool(host=lemon_cfg['redis']['host'], port=lemon_cfg['redis']['port'], db=flags.redis_db)
    # redis_conn = redis.Redis(connection_pool=pool)
    parameters = lemon_cfg['parameters']
    # gpu_ids = parameters['gpu_ids']
    # gpu_list = parameters['gpu_ids'].split(",")

    """Init cuda"""
    # os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    # os.environ["CUDA_VISIBLE_DEVICES"] = gpu_ids
    batch_size = 32

    """Switch backend: KERAS_BACKEND must be set before keras is imported."""
    bk_list = ['tensorflow', 'theano', 'cntk', 'mxnet']
    bk = flags.backend
    os.environ['KERAS_BACKEND'] = bk
    os.environ['PYTHONHASHSEED'] = '0'
    if bk == 'tensorflow':
        os.environ["TF_CPP_MIN_LOG_LEVEL"] = '2'  # only show warnings and errors
        import tensorflow as tf
        main_logger.info(tf.__version__)
        batch_size = 128
        import keras
    if bk == 'theano':
        # if len(gpu_list) == 2:
        #     os.environ['THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]};dev{gpu_list[1]}->cuda{gpu_list[1]}," \
        #                                  f"force_device=True,floatX=float32,lib.cnmem=1"
        # else:
        #     os.environ['THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]}," \
        #                                  f"force_device=True,floatX=float32,lib.cnmem=1"
        import theano as th
        import keras
        main_logger.info(th.__version__)
    if bk == "cntk":
        # from cntk.device import try_set_default_device, gpu
        # try_set_default_device(gpu(int(gpu_list[0])))
        import cntk as ck
        main_logger.info(ck.__version__)
        import keras
    if bk == "mxnet":
        import mxnet
        main_logger.info(f"mxnet_version {mxnet.__version__}")
        import keras
        batch_size = 16
    from keras import backend as K

    try:
        """Get the model prediction"""
        main_logger.info("INFO: Using {} as backend for states extraction | {} is wanted".format(K.backend(), bk))
        # Load the dataset for this experiment and convert it to the expected format.
        x, y = DataUtils.get_data_by_exp(flags.exp)
        mut_model_name = os.path.split(flags.model)[-1]
        main_logger.info(flags.model)
        _get_prediction(bk=bk, x=x, y=y, model_path=flags.model, batch_size=batch_size)
    except Exception:
        import traceback
        traceback.print_exc()
        sys.exit(-1)