刘凡 2 years ago
parent
commit
ad9a4f33ff
100 changed files with 2,303 additions and 0 deletions
  1. 18 0
      target/File/advance_touch_1.py
  2. 5 0
      target/File/conftest_1.py
  3. 67 0
      target/File/data_preprocessing_utils_1.py
  4. 68 0
      target/File/diml_to_interiornet_2.py
  5. 20 0
      target/File/ego_to_json_2.py
  6. 12 0
      target/File/ego_to_json_3.py
  7. 31 0
      target/File/ego_to_json_4.py
  8. 35 0
      target/File/ego_to_json_5.py
  9. 8 0
      target/File/ego_to_json_6.py
  10. 17 0
      target/File/esquema_1.py
  11. 19 0
      target/File/esquema_2.py
  12. 25 0
      target/File/esquema_3.py
  13. 41 0
      target/File/esquema_4.py
  14. 37 0
      target/File/file_handler_1.py
  15. 24 0
      target/File/generate_directories_4.py
  16. 9 0
      target/File/generate_directories_5.py
  17. 9 0
      target/File/generate_directories_6.py
  18. 41 0
      target/File/logging_1.py
  19. 29 0
      target/File/logging_2.py
  20. 28 0
      target/File/logging_3.py
  21. 28 0
      target/File/logging_4.py
  22. 34 0
      target/File/logging_5.py
  23. 26 0
      target/File/make_folder_1.py
  24. 25 0
      target/File/mkdir_2.py
  25. 28 0
      target/File/mkdir_4.py
  26. 8 0
      target/File/mkdir_p_1.py
  27. 24 0
      target/File/project_creator_2.py
  28. 41 0
      target/File/project_creator_3.py
  29. 22 0
      target/File/setup_2.py
  30. 23 0
      target/File/split_data_in_k_folds_1.py
  31. 22 0
      target/File/split_data_in_k_folds_2.py
  32. 71 0
      target/File/stc_vid2frames_1.py
  33. 23 0
      target/File/test_archive_10.py
  34. 21 0
      target/File/test_archive_3.py
  35. 28 0
      target/File/test_archive_4.py
  36. 20 0
      target/File/test_archive_5.py
  37. 21 0
      target/File/test_archive_6.py
  38. 21 0
      target/File/test_archive_7.py
  39. 11 0
      target/File/test_archive_8.py
  40. 24 0
      target/File/test_archive_9.py
  41. 9 0
      target/File/test_tool_2.py
  42. 6 0
      target/File/test_tool_3.py
  43. 33 0
      target/File/test_tool_4.py
  44. 35 0
      target/File/test_tool_5.py
  45. 18 0
      target/File/test_tool_6.py
  46. 25 0
      target/File/test_tool_7.py
  47. 8 0
      target/File/tutorial_1.py
  48. 55 0
      target/File/tutorial_2.py
  49. 23 0
      target/File/tutorial_3.py
  50. 34 0
      target/File/tutorial_4.py
  51. 22 0
      target/File/tutorial_5.py
  52. 31 0
      target/File/tutorial_6.py
  53. 21 0
      target/File/tutorial_7.py
  54. 22 0
      target/File/tutorial_8.py
  55. 40 0
      target/File/tutorial_9.py
  56. 8 0
      target/File/utils_1.py
  57. 20 0
      target/File/utils_2.py
  58. 9 0
      target/File/utils_3.py
  59. 5 0
      target/File/utils_4.py
  60. 6 0
      target/File/visualize_2_1.py
  61. 33 0
      target/Hash/EncrypC_12.py
  62. 33 0
      target/Hash/EncrypC_14.py
  63. 27 0
      target/Hash/EncrypC_2.py
  64. 27 0
      target/Hash/EncrypC_3.py
  65. 31 0
      target/Hash/EncrypC_5.py
  66. 21 0
      target/Hash/EncryptionDecryption_1.py
  67. 18 0
      target/Hash/EncryptionDecryption_2.py
  68. 8 0
      target/Hash/Encryption_And_Hashing/Task2_6.py
  69. 13 0
      target/Hash/Encryption_And_Hashing/Task2_7.py
  70. 15 0
      target/Hash/Encryption_And_Hashing/Task2_8.py
  71. 15 0
      target/Hash/Encryption_And_Hashing/Task4_6.py
  72. 14 0
      target/Hash/Encryption_And_Hashing/Task4_7.py
  73. 13 0
      target/Hash/Encryption_And_Hashing/Task4_8.py
  74. 39 0
      target/Hash/Encryption_And_Hashing/Task5_3.py
  75. 9 0
      target/Hash/Encryption_And_Hashing/Task7_3.py
  76. 10 0
      target/Hash/Encryption_And_Hashing/Task7_4.py
  77. 11 0
      target/Hash/Encryption_And_Hashing/Task7_5.py
  78. 13 0
      target/Hash/Task1_7.py
  79. 16 0
      target/Hash/Task1_8.py
  80. 13 0
      target/Hash/base64_2.py
  81. 16 0
      target/Hash/base64_3.py
  82. 7 0
      target/Hash/base64_4.py
  83. 3 0
      target/Hash/base64_5.py
  84. 74 0
      target/Hash/biometry_hash_1.py
  85. 3 0
      target/Hash/biometry_hash_10.py
  86. 21 0
      target/Hash/biometry_hash_12.py
  87. 35 0
      target/Hash/biometry_hash_2.py
  88. 39 0
      target/Hash/biometry_hash_3.py
  89. 69 0
      target/Hash/biometry_hash_5.py
  90. 29 0
      target/Hash/biometry_hash_8.py
  91. 19 0
      target/Hash/biometry_hash_9.py
  92. 25 0
      target/Hash/crypto_1.py
  93. 3 0
      target/Hash/crypto_3.py
  94. 2 0
      target/Hash/crypto_4.py
  95. 8 0
      target/Hash/crypto_5.py
  96. 15 0
      target/Hash/crypto_6.py
  97. 25 0
      target/Hash/crypto_7.py
  98. 17 0
      target/Hash/crypto_8.py
  99. 8 0
      target/Hash/dirist_14.py
  100. 12 0
      target/Hash/dirist_15.py

+ 18 - 0
target/File/advance_touch_1.py

@@ -0,0 +1,18 @@
def advance_touch(paths, cd):
    """Create any missing parent folders for each path and touch the files.

    Paths ending in '/' are treated as directories (no file is created).
    When *cd* is truthy, the process working directory is changed into the
    directory that was just ensured.
    """
    for target in paths:
        # Everything before the last '/' is the directory part.
        parent = '/'.join(target.split('/')[:-1])
        if parent != '' and not os.path.exists(parent):
            os.makedirs(parent)
        # Optionally hop into the freshly ensured directory.
        if cd:
            os.chdir(os.path.join(os.getcwd(), parent) + '/')
        # Touch the file unless the path denotes a directory.
        if not (target.endswith('/') or os.path.isfile(target)):
            try:
                open(target, 'w+').close()
            except IsADirectoryError:
                pass

+ 5 - 0
target/File/conftest_1.py

@@ -0,0 +1,5 @@
def create_file(path: str, content: str):
    """Write *content* to a new text file located at *path*."""
    with open(path, "w") as handle:
        handle.write(content)

+ 67 - 0
target/File/data_preprocessing_utils_1.py

@@ -0,0 +1,67 @@
def convert_MAP(directory, output_directory, min_shape, file_format = '.npy', search_keys = None, dtype = np.float32):
    '''
    Convert the .mat files in *directory* to .npy (or image) files under
    *output_directory*.

    The output folder (relative to the CWD) is wiped and recreated on every
    call.  For each .mat file the requested keys are extracted, transposed,
    normalised to [0, 1] and saved; arrays smaller than *min_shape* are
    rejected.

    Parameters:
        directory: folder scanned for ``.mat`` files.
        output_directory: output folder name, relative to the CWD.
        min_shape: minimum (rows, cols) an array must have to be kept.
        file_format: '.npy' saves raw arrays; any other value is passed to
            imageio.imwrite.
        search_keys: list of struct keys to extract; None selects every key
            whose name contains 'map'.
        dtype: NOTE(review) accepted but unused — the conversion below
            hard-codes np.float32.

    Returns:
        Absolute path of the output directory.
    '''
    new_dir = os.path.join(os.getcwd(), output_directory)
    # Start from an empty output directory on every run.
    if os.path.exists(new_dir):
        shutil.rmtree(new_dir)
    os.mkdir(new_dir)
    for file in os.listdir(directory):
        filename = os.fsdecode(file)
        if not filename.endswith(".mat"):
            continue
        filepath = os.path.join(directory, filename)
        array_dict = {}
        # v7.3 .mat files are HDF5 (h5py); older versions need scipy.io.
        try:
            f = h5py.File(filepath, 'r')
        except Exception:
            f = sio.loadmat(filepath)
        for k, v in f.items():
            array_dict[k] = np.array(v, dtype = np.float32)
        if search_keys is None:
            # BUGFIX: the previous code assigned 'map' to search_keys here,
            # which leaked into later loop iterations: from the second file
            # on, the else-branch iterated the *characters* 'm', 'a', 'p'
            # as keys and rejected nearly everything.  Filter locally.
            filtered_dict = dict(filter(lambda item: 'map' in item[0], array_dict.items()))
        else:
            filtered_dict = {}
            for search_key in search_keys:
                if search_key in array_dict:
                    filtered_dict[search_key] = array_dict[search_key]
        if len(filtered_dict) == 0:
            print('No Data to Meet Search Key Requirements: Datapoint Rejected -> ' + filepath)
            continue
        arrays = []
        for k, v in filtered_dict.items():
            temp = np.transpose(v.astype(np.float32))
            # Normalise to [0, 1].  Other schemes, for reference:
            #   [-1, 1]  -> arr/(np.max(arr)/2) - 1
            #   [0, 255] -> (arr/np.max(arr)*255).astype(np.uint8)
            temp = temp/(np.max(temp))
            arrays.append(temp)
        # Collapse any extra channel dimension by averaging over it.
        for i in range(len(arrays)):
            if len(arrays[i].shape) > 2:
                arrays[i] = np.mean(arrays[i], axis = 2)
        for i in range(len(arrays)):
            # NOTE(review): str.strip('.mat') removes *characters*, not the
            # suffix (e.g. 'tam.mat' over-strips); kept for output-name
            # compatibility, but os.path.splitext would be safer.
            new_dir_filepath = os.path.join(new_dir, filename.strip('.mat')
                                            + '_index'+str(i) + file_format)
            array = arrays[i]
            if array.shape[0] >= min_shape[0] and array.shape[1] >= min_shape[1]:
                if file_format == '.npy':
                    np.save(new_dir_filepath, array, allow_pickle=True, fix_imports=True)
                else:
                    imageio.imwrite(new_dir_filepath, array)
            elif i == 0:
                print('Min Size Not Met: Datapoint Rejected -> ' + filepath)
    return os.path.join(os.getcwd(), output_directory)
+
+##################################################################################################################################
+# Data Cleaning Procedures:

+ 68 - 0
target/File/diml_to_interiornet_2.py

@@ -0,0 +1,68 @@
def full_to_interiornet():
    """Convert DIML depth scenes into the InteriorNet directory layout.

    Reads scene ranges from scenes.txt, samples ``num_frames`` random colour
    frames per scene, pairs each with its depth map, resizes both, and
    writes cam0/depth0/label0 trees plus a copied cam0.render file.

    NOTE(review): all paths are hard-coded for one specific machine, and the
    label images written below are a constant 3 everywhere — presumably
    placeholder labels; confirm downstream expectations.
    """
    scene_file_path = "/nvme/datasets/diml_depth/scenes.txt"
    base_path = "/nvme/datasets/diml_depth/"
    out_path = "/nvme/datasets/diml_depth/HD7/"
    cam0_render = "/nvme/datasets/interiornet/3FO4IDEI1LAV_Bedroom/cam0.render"
    num_frames = 20
    shape = (672, 378)
    # Fixed seed so the same frames are sampled on every run.
    np.random.seed(123)

    with open(scene_file_path, 'r') as f:
        scene_lines = f.readlines()

    # Each line looks like "<scene dir>-[start:end]".
    scene_lines = [sn.split('\n')[0] for sn in scene_lines]
    scene_paths = [os.path.join(base_path, sn.split('-')[0]) for sn in scene_lines]
    scene_ranges = [sn.split('-')[1] for sn in scene_lines]
    scene_ranges = [(int(rn[1:-1].split(':')[0]), int(rn[1:-1].split(':')[1])) for rn in scene_ranges]

    for i, scene_path in enumerate(scene_paths):
        # Collect every colour frame inside the scene's index sub-range.
        file_list = []
        for j in range(scene_ranges[i][0], scene_ranges[i][1]+1):
            scene_path_col = os.path.join(scene_path, "{}/col".format(j))
            if os.path.exists(scene_path_col):
                file_list += [os.path.join(scene_path_col, dn) for dn in os.listdir(scene_path_col)]

        # Output scene index continues from whatever already sits in out_path.
        scene_count = len(os.listdir(out_path))
        scene_out_path = "{:02d}DIML_{}".format(scene_count + 1, scene_path.split('/')[-2].split(' ')[1])
        scene_out_path = os.path.join(out_path, scene_out_path)

        # Recreate the InteriorNet folder skeleton from scratch.
        if os.path.exists(scene_out_path):
            shutil.rmtree(scene_out_path)
        os.mkdir(scene_out_path)
        os.mkdir(os.path.join(scene_out_path, "cam0"))
        os.mkdir(os.path.join(scene_out_path, "depth0"))
        os.mkdir(os.path.join(scene_out_path, "label0"))
        os.mkdir(os.path.join(scene_out_path, "cam0", "data"))
        os.mkdir(os.path.join(scene_out_path, "depth0", "data"))
        os.mkdir(os.path.join(scene_out_path, "label0", "data"))
        shutil.copyfile(cam0_render, os.path.join(scene_out_path, "cam0.render"))
        print(scene_out_path)

        # Sample frames without replacement; the depth map lives in a
        # parallel up_png/ tree with a _ud.png suffix.
        frame_paths = np.random.choice(file_list, num_frames, False)
        for j, frame_path in enumerate(frame_paths):
            img = cv2.imread(frame_path, cv2.IMREAD_UNCHANGED)
            depth_path = frame_path.replace('/col/', '/up_png/')
            depth_path = depth_path.replace('_c.png', '_ud.png')
            depth_img = cv2.imread(depth_path, cv2.IMREAD_UNCHANGED)

            # Abort the entire run on the first unreadable image.
            if depth_img is None:
                print(depth_path)
                exit()
            if img is None:
                print(frame_path)
                exit()

            img = cv2.resize(img, dsize=shape, interpolation=cv2.INTER_LINEAR)
            depth_img = cv2.resize(depth_img, dsize=shape,
                                   interpolation=cv2.INTER_LINEAR)
            # Placeholder semantic labels: constant class 3 everywhere.
            label_img = depth_img.copy()
            label_img[:, :] = 3

            cv2.imwrite(os.path.join(scene_out_path, "cam0", "data", "{}.png".format(j)), img)
            cv2.imwrite(os.path.join(scene_out_path, "depth0", "data", "{}.png".format(j)), depth_img)
            cv2.imwrite(os.path.join(scene_out_path, "label0", "data", "{}_instance.png".format(j)), label_img)
            cv2.imwrite(os.path.join(scene_out_path, "label0", "data", "{}_nyu.png".format(j)), label_img)


if __name__ == '__main__':
    full_to_interiornet()

+ 20 - 0
target/File/ego_to_json_2.py

@@ -0,0 +1,20 @@
def json_test():
    """Generate annotations for the "train" split under ROOT_DIR.

    The analogous handling of the "test" and "val" splits (and the creation
    of the images/ sub-folder) was disabled in the original workflow and is
    intentionally not executed here.
    """
    train_dir = os.path.join(ROOT_DIR, "train")
    img_dir = os.path.join(train_dir, "images")
    create_annotations(train_dir, img_dir)

+ 12 - 0
target/File/ego_to_json_3.py

@@ -0,0 +1,12 @@
def json_train_val():
    """Collect every split except "test" under ROOT_DIR/tmp and annotate it."""
    tmp_dir = os.path.join(ROOT_DIR, "tmp")
    os.makedirs(tmp_dir)
    img_dir = os.path.join(tmp_dir, "images")
    os.makedirs(img_dir)

    # Move every sibling folder (except tmp itself and the test split)
    # into the staging directory.
    for entry in os.listdir(ROOT_DIR):
        if entry not in ("tmp", "test"):
            shutil.move(os.path.join(ROOT_DIR, entry),
                        os.path.join(ROOT_DIR, tmp_dir, entry))

    create_annotations(tmp_dir, img_dir)

+ 31 - 0
target/File/ego_to_json_4.py

@@ -0,0 +1,31 @@
def create_annotations(directory, img_dir):
    """Flatten per-folder images under *directory* into *img_dir* and write
    one JSON annotation file.

    For each sub-folder the polygons stored in its .mat annotation file are
    matched by sorted position to the image files; images are moved into
    *img_dir* with a ``<folder>_`` prefix and the emptied sub-folders are
    removed afterwards.

    NOTE(review): relies on module-level ANNOTATION_FILE and SAVE_FILE, and
    assumes mat["polygons"][0][i] lines up with the i-th sorted file name —
    confirm against the dataset layout.
    """
    annotations = {}
    for dir_name in os.listdir(directory):
        if not (dir_name == "images"):
            # os.walk yields (root, dirs, files); only the file list is used.
            for _, _, files in os.walk(os.path.join(directory, dir_name)):
                mat = scipy.io.loadmat(os.path.join(directory, dir_name, ANNOTATION_FILE))

                for i, img_file in enumerate(sorted(files)):
                    if not (img_file.endswith(".mat")):
                        # Prefix with the folder name to keep names unique.
                        new_img_file = dir_name + "_" + img_file

                        image = {
                            "name":     new_img_file,
                            "objects":  []
                        }

                        # Keep only non-empty polygon segmentations.
                        for segmentation in mat["polygons"][0][i]:
                            if segmentation.any():
                                image["objects"].append(segmentation.tolist())

                        annotations[new_img_file] = image

                        shutil.move(os.path.join(directory, dir_name, img_file), os.path.join(img_dir, new_img_file))

    with open(os.path.join(directory, SAVE_FILE), 'w') as output_json_file:
        json.dump(annotations, output_json_file)

    # Clean up the now-empty source folders.
    for dir_name in os.listdir(directory):
        if not (dir_name == "images" or dir_name == "annotations.json"):
            shutil.rmtree(os.path.join(directory, dir_name))

+ 35 - 0
target/File/ego_to_json_5.py

@@ -0,0 +1,35 @@
def split_train_val():
    """Split the annotated ROOT_DIR/tmp data into train/ and val/ folders.

    Roughly 17.65% of the annotated items are sampled into validation; the
    annotation JSON is split accordingly, images are moved to their split's
    images/ folder, and the tmp staging directory is deleted.
    """
    tmp_dir = os.path.join(ROOT_DIR, "tmp")

    os.makedirs(os.path.join(ROOT_DIR, "train"))
    train_dir = os.path.join(ROOT_DIR, "train")
    os.makedirs(os.path.join(train_dir, "images"))

    os.makedirs(os.path.join(ROOT_DIR, "val"))
    val_dir = os.path.join(ROOT_DIR, "val")
    os.makedirs(os.path.join(val_dir, "images"))

    # Opening JSON file
    with open(os.path.join(tmp_dir, 'annotations.json')) as json_file:
        data = json.load(json_file)

        # 0.1765 is 15% of the full dataset, because the test split already
        # holds roughly 20% (translated from the original Dutch comment).
        val_keys = random.sample(list(data), round(len(data) * 0.1765))

        validation = {k: v for k, v in data.items() if k in val_keys}
        train = {k: v for k, v in data.items() if k not in val_keys}

    with open(os.path.join(val_dir, SAVE_FILE), 'w') as output_json_file:
        json.dump(validation, output_json_file)

    with open(os.path.join(train_dir, SAVE_FILE), 'w') as output_json_file:
        json.dump(train, output_json_file)

    for key, _ in validation.items():
        shutil.move(os.path.join(tmp_dir, "images", key), os.path.join(val_dir, "images", key))

    # NOTE(review): unlike the val loop above, the destination here omits
    # the file name; shutil.move into an existing directory keeps the base
    # name, so the end result is the same.
    for key, _ in train.items():
        shutil.move(os.path.join(tmp_dir, "images", key), os.path.join(train_dir, "images"))

    shutil.rmtree(tmp_dir)

+ 8 - 0
target/File/ego_to_json_6.py

@@ -0,0 +1,8 @@
def move_to_folder():
    """Gather the test/val/train splits under ROOT_DIR/json, then move
    ROOT_DIR itself to ../data."""
    json_dir = os.path.join(ROOT_DIR, "json")
    os.makedirs(json_dir)
    for split in ("test", "val", "train"):
        shutil.move(os.path.join(ROOT_DIR, split), json_dir)

    shutil.move(ROOT_DIR, "../data")

+ 17 - 0
target/File/esquema_1.py

@@ -0,0 +1,17 @@
def crearFacultad(request):
    """Create the resources/ folder for a new faculty.

    Expects a JSON body with 'fac_nombre'; answers 200 on success and 500
    when the directory cannot be created.
    """
    json_req = request.json
    fac_nombre = json_req['fac_nombre']
    try:
        os.mkdir('resources/' + fac_nombre)
    except OSError:
        return jsonify({"message":"error al crear facultad"}),500
    return jsonify({"message":"facultad creada"}),200

+ 19 - 0
target/File/esquema_2.py

@@ -0,0 +1,19 @@
def crearCarrera(request):
    """Create the folder for a new degree programme inside its faculty.

    Expects a JSON body with 'fac_nombre' and 'car_nombre'; answers 200 on
    success and 500 when the directory cannot be created.
    """
    json_req = request.json
    fac_nombre = json_req['fac_nombre']
    car_nombre = json_req['car_nombre']
    try:
        os.mkdir('resources/' + fac_nombre + '/' + car_nombre)
    except OSError:
        return jsonify({"message":"error al crear carrera"}),500
    return jsonify({"message":"carrera creada"}),200

+ 25 - 0
target/File/esquema_3.py

@@ -0,0 +1,25 @@
def crearAsignatura(request):
    """Create the folder tree for a new course, including its Portafolios
    sub-directory.

    Expects a JSON body with 'fac_nombre', 'car_nombre' and
    'asig_identificador'; answers 200 on success and 500 on OSError.
    """
    json_req = request.json
    try:
        ruta = ('resources/' + json_req['fac_nombre'] + '/'
                + json_req['car_nombre'] + '/'
                + json_req['asig_identificador'] + "/")
        # Create any missing intermediate levels first.
        if not os.path.isdir(ruta):
            os.makedirs(ruta)
        os.mkdir(ruta + 'Portafolios')
    except OSError as e:
        print(e.strerror)
        return jsonify({"message":"error al crear asignatura"}),500
    return jsonify({"message":"asignatura creada"}),200

+ 41 - 0
target/File/esquema_4.py

@@ -0,0 +1,41 @@
def crearPortafolio(request):
    """Create the full portfolio folder skeleton for one person in a course.

    Expects a JSON body with 'fac_nombre', 'car_nombre',
    'asig_identificador' and 'per_cedula'; answers 200 on success and 500
    on OSError.
    """
    try:
        json_req = request.json
        base = ('resources/' + json_req['fac_nombre'] + '/'
                + json_req['car_nombre'] + '/'
                + json_req['asig_identificador'] + '/Portafolios/'
                + json_req['per_cedula'])
        os.mkdir(base)

        os.mkdir(base + '/1. Datos informativos')

        # Curricular elements and all of their lettered sub-folders.
        curriculares = base + '/2. Elementos curriculares'
        os.mkdir(curriculares)
        for sub in ('a. Syllabus',
                    'b. Expectativas',
                    'c. Apuntes de clase',
                    'd. Evaluaciones',
                    'e. Investigaciones',
                    'f. Actividades de experimentación',
                    'g. Proyectos',
                    'h. Estudios de caso',
                    'i. Planteamiento de problemas',
                    'j. Registro de asistencia',
                    'k. Registro de observaciones',
                    'l. Tareas intraclases',
                    'm. Tareas autónomas',
                    'n. Tareas de refuerzo'):
            os.mkdir(curriculares + '/' + sub)

        os.mkdir(base + '/3. Informe final')

    except OSError as error:
        print(error)
        return jsonify({"message":"error al crear portafolio"}),500
    else:
        return jsonify({"message":"portafolio creado"}),200

+ 37 - 0
target/File/file_handler_1.py

@@ -0,0 +1,37 @@
def file_storage(file_path, suffix):
    r"""Return a dated storage path, creating the folders on demand.

        file_path = C:\Users\Desktop\video_
        suffix    = abc.py
        return      C:\Users\Desktop\video_\2020\12\12\abc.py

    Args:
        file_path: base directory of the dated tree.
        suffix: file name to append under today's day folder.

    Returns:
        ``<file_path>/<YYYY>/<MM>/<DD>/<suffix>``.
    """
    tm = time.localtime(time.time())
    # Current year / month / day, zero-padded by strftime.
    year = time.strftime('%Y', tm)
    month = time.strftime('%m', tm)
    day = time.strftime('%d', tm)
    file_day = file_path + '/' + year + '/' + month + '/' + day
    # One makedirs call replaces the old four-level nested exists/mkdir
    # cascade: it creates every missing level and, with exist_ok=True, is
    # free of the check-then-create race the cascade had.
    os.makedirs(file_day, exist_ok=True)
    return os.path.join(file_day, suffix)

+ 24 - 0
target/File/generate_directories_4.py

@@ -0,0 +1,24 @@
def generate_numpy_ds(dataset_type, root='E:/Memoire/ProstateX/generated'):
    """
    Create the directory structure for the final numpy arrays.

    Resulting layout:
        <root>/train/numpy/{t2,bval,adc,ktrans}   when dataset_type == '1'
        <root>/test/numpy/{t2,bval,adc,ktrans}    otherwise

    Args:
        dataset_type: '1' selects the training tree; anything else test.
        root: base folder.  New optional parameter; the default keeps the
            original hard-coded location, so existing callers are
            unaffected.  (Also removes the duplicated branch bodies of the
            original.)
    """
    split = 'train' if dataset_type == str(1) else 'test'
    new_path = Path(root) / split / 'numpy'
    new_path.mkdir(parents=True, exist_ok=True)
    for modality in ('t2', 'bval', 'adc', 'ktrans'):
        new_path.joinpath(modality).mkdir(parents=True, exist_ok=True)

+ 9 - 0
target/File/generate_directories_5.py

@@ -0,0 +1,9 @@
def generate_dataframe_ds(dataset_type, root='E:/Memoire/ProstateX/generated'):
    """Create <root>/{train|test}/dataframes ('1' selects train).

    Args:
        dataset_type: '1' selects the training tree; anything else test.
        root: base folder.  New optional parameter; the default keeps the
            original hard-coded location (and removes the duplicated
            branches of the original).
    """
    split = 'train' if dataset_type == str(1) else 'test'
    (Path(root) / split / 'dataframes').mkdir(parents=True, exist_ok=True)

+ 9 - 0
target/File/generate_directories_6.py

@@ -0,0 +1,9 @@
def generate_logs_ds(dataset_type, root='E:/Memoire/ProstateX/generated'):
    """Create <root>/{train|test}/logs ('1' selects train).

    Args:
        dataset_type: '1' selects the training tree; anything else test.
        root: base folder.  New optional parameter; the default keeps the
            original hard-coded location (and removes the duplicated
            branches of the original).
    """
    split = 'train' if dataset_type == str(1) else 'test'
    (Path(root) / split / 'logs').mkdir(parents=True, exist_ok=True)

+ 41 - 0
target/File/logging_1.py

@@ -0,0 +1,41 @@
def set():
    """Ensure the media and log directory skeleton exists.

    Directories are created one level at a time, in dependency order; the
    function silently returns on the first mkdir failure, exactly like the
    original if-cascade it replaces.
    """
    required = (
        settings.MEDIA_ROOT,
        settings.MEDIA_ROOT + '/download',
        settings.BASE_DIR + "/log",
        settings.BASE_DIR + "/log/message",
        settings.BASE_DIR + "/log/error",
        settings.BASE_DIR + "/log/log",
        settings.MEDIA_ROOT + "/tgbot",
    )
    for directory in required:
        if not os.path.exists(directory):
            try:
                os.mkdir(directory)
            except OSError:
                return

+ 29 - 0
target/File/logging_2.py

@@ -0,0 +1,29 @@
def message(message):
    """Append *message*, timestamped, to today's message log file.

    Ensures <BASE_DIR>/log and <BASE_DIR>/log/message exist, silently
    returning if either cannot be created.  The log file is named
    ``message_YYYYMMDD.log`` and opened in append mode.
    """
    DirLogs = settings.BASE_DIR + "/log"
    if not os.path.exists(DirLogs):
        try:
            os.mkdir(DirLogs)
        except OSError:
            return
    DirLogs = settings.BASE_DIR + "/log/message"
    if not os.path.exists(DirLogs):
        try:
            os.mkdir(DirLogs)
        except OSError:
            return
    date = datetime.now()
    # Zero-padded YYYYMMDD stamp for the file name.
    month = "0" if date.month < 10 else ""
    month += str(date.month)
    day = "0" if date.day < 10 else ""
    day += str(date.day)
    StrDate = "%s%s%s" % (str(date.year), month, day)
    # BUGFIX: the original evaluated `my_file.closed` / `file.closed` —
    # attribute reads that close nothing — so the handle leaked until GC.
    # A context manager guarantees the file is closed.
    with open(DirLogs + '/message_' + StrDate + '.log', 'a') as file:
        my_file = File(file)
        my_file.write("[%s]: %s\n" % (
            str(datetime.now().strftime("%d-%m-%Y %H:%M:%S")),
            str(message)
            ))

+ 28 - 0
target/File/logging_3.py

@@ -0,0 +1,28 @@
def log(message):
    """Append *message*, timestamped, to today's general log file.

    Ensures <BASE_DIR>/log and <BASE_DIR>/log/log exist, silently returning
    if either cannot be created.

    NOTE(review): the file prefix 'message_' looks copy-pasted from
    message(); kept unchanged for compatibility with existing logs.
    """
    DirLogs = settings.BASE_DIR + "/log"
    if not os.path.exists(DirLogs):
        try:
            os.mkdir(DirLogs)
        except OSError:
            return
    DirLogs = settings.BASE_DIR + "/log/log"
    if not os.path.exists(DirLogs):
        try:
            os.mkdir(DirLogs)
        except OSError:
            return
    date = datetime.now()
    # Zero-padded YYYYMMDD stamp for the file name.
    month = "0" if date.month < 10 else ""
    month += str(date.month)
    day = "0" if date.day < 10 else ""
    day += str(date.day)
    StrDate = "%s%s%s" % (str(date.year), month, day)
    # BUGFIX: the original's `my_file.closed` / `file.closed` were no-op
    # attribute reads; use a context manager so the handle is closed.
    with open(DirLogs + '/message_' + StrDate + '.log', 'a') as file:
        my_file = File(file)
        my_file.write("[%s]: %s\n" % (
            str(datetime.now().strftime("%d-%m-%Y %H:%M:%S")),
            str(message)))

+ 28 - 0
target/File/logging_4.py

@@ -0,0 +1,28 @@
def error(message):
    """Append *message*, timestamped, to today's error log file.

    Ensures <BASE_DIR>/log and <BASE_DIR>/log/error exist, silently
    returning if either cannot be created.  The log file is named
    ``errors_YYYYMMDD.log`` and opened in append mode.
    """
    DirLogs = settings.BASE_DIR + "/log"
    if not os.path.exists(DirLogs):
        try:
            os.mkdir(DirLogs)
        except OSError:
            return
    DirLogs = settings.BASE_DIR + "/log/error"
    if not os.path.exists(DirLogs):
        try:
            os.mkdir(DirLogs)
        except OSError:
            return
    date = datetime.now()
    # Zero-padded YYYYMMDD stamp for the file name.
    month = "0" if date.month < 10 else ""
    month += str(date.month)
    day = "0" if date.day < 10 else ""
    day += str(date.day)
    StrDate = "%s%s%s" % (str(date.year), month, day)
    # BUGFIX: the original's `my_file.closed` / `file.closed` were no-op
    # attribute reads; use a context manager so the handle is closed.
    with open(DirLogs + '/errors_' + StrDate + '.log', 'a') as file:
        my_file = File(file)
        my_file.write("[%s]: %s\n" % (
            str(datetime.now().strftime("%d-%m-%Y %H:%M:%S")),
            str(message)))

+ 34 - 0
target/File/logging_5.py

@@ -0,0 +1,34 @@
def check_dir():
    """Make sure every attachment/media directory exists.

    Directories are created in dependency order; on the first mkdir failure
    the traceback is logged and the function returns, matching the original
    if-cascade.  Any unexpected error is logged by the outer handler.
    """
    try:
        wanted = (
            settings.MEDIA_ROOT,
            settings.MEDIA_ROOT + "/att",
            settings.MEDIA_ROOT + "/att/biophoto",
            settings.ATT_ROOT,
            settings.ATT_ROOT + "/USERPIC",
        )
        for directory in wanted:
            if not os.path.exists(directory):
                try:
                    os.mkdir(directory)
                except OSError:
                    logging.error(traceback.format_exc())
                    return
    except Exception as err:
        logging.error('%s\n%s' % (traceback.format_exc(), str(err)))

+ 26 - 0
target/File/make_folder_1.py

@@ -0,0 +1,26 @@
def make_folder(dealername):
    """Create the install folder tree for *dealername*.

    Layout: <INSTALLS>\<first letter>\<dealer name>\{config,original,final}.

    Side effect: the process working directory is left inside the new
    dealer folder (as in the original).

    NOTE(review): spaces are mapped to underscores and then back, which
    also converts any underscore originally present in the name to a
    space — preserved for compatibility.
    """
    install_dir = 'C:\\Users\\corcoras\\Desktop\\FY14 INSTALLS'
    os.chdir(install_dir)
    dealername_no_space = dealername.replace(" ", "_")
    # Dealers are filed under the first character of their name.
    dealer_folder = dealername_no_space[:1]
    os.chdir(dealer_folder)
    dealername_spaces = dealername_no_space.replace("_", " ")
    os.mkdir(dealername_spaces)
    os.chdir(dealername_spaces)
    os.mkdir("config")
    os.mkdir("original")
    os.mkdir("final")
    # BUGFIX: the original f-string used "\{...}" (an invalid escape
    # sequence that only worked by accident); "\\" prints the same
    # backslash explicitly.  The printed text is unchanged.  The no-op
    # bare expressions (os.getcwd(), variable echoes) were removed.
    print(f"\nFolder was created : {install_dir}\\{dealer_folder}\\{dealername_spaces}")

+ 25 - 0
target/File/mkdir_2.py

@@ -0,0 +1,25 @@
def main():
    """Command-line entry point: create a package directory for each path.

    Each argument is a dotted path (``a.b.c``) converted to ``a/b/c`` and
    handed to ``pydir``; with -v, progress is written to stdout.

    NOTE(review): ``__doc__.strip()`` assumes the enclosing module has a
    docstring — it raises AttributeError otherwise.
    """
    usage = '%prog path [path2] [path3] [pathN]\n\n' + __doc__.strip()
    parser = OptionParser(usage=usage, option_list=(
        make_option('-v', '--verbose', default=False, action='store_true'),
    ))

    options, args = parser.parse_args()

    if len(args) == 0:
        parser.error('No paths given.')

    # Only chatter when -v was passed.
    output = sys.stdout if options.verbose else None

    for index, path in enumerate(args):
        # Dotted module path -> filesystem path.
        path = path.replace('.', os.path.sep)

        # Blank line between reports for consecutive paths.
        if output and index > 0:
            output.write('\n')

        try:
            pydir(path, output=output)
        except BaseException as exc:
            print ('Couldn\'t create %s: %s' % (path, exc,))
+

+ 28 - 0
target/File/mkdir_4.py

@@ -0,0 +1,28 @@
def info(line):
    # NOTE(review): this block looks like an extraction artifact — the code
    # below the first if-statement references `output` and `path`, which are
    # not parameters of info(); in the original module this presumably lived
    # inside a pydir(path, output=None) function with info() as a nested
    # helper.  `xrange` also marks it as Python 2 code.  Kept verbatim.
    if output:
        output.write(line)
        output.write('\n')

    # Create the directory tree (EAFP), reporting the outcome.
    try:
        os.makedirs(path)
    except (OSError, IOError) as exc:
        # NOTE(review): this reports "already exists" when the path is NOT
        # a directory (`not os.path.isdir`) and re-raises when it IS — the
        # condition looks inverted relative to the message; verify against
        # the original source before relying on it.
        if not os.path.isdir(path):
            info('Path already exists: %s' % path)
        else:
            raise
    else:
        info('Created directory %s' % path)

    # Drop an __init__.py into every level so the tree forms a package.
    segments = path.split(os.path.sep)
    for i in xrange(len(segments)):
        init_filename = os.path.sep.join(segments[:i+1] + ['__init__.py'])
        if not os.path.isfile(init_filename):
            try:
                open(init_filename, 'w').close()
            except (OSError, IOError) as exc:
                raise
            else:
                info('Created file %s' % (init_filename,))
        else:
            info('File already exists: %s' % (init_filename,))

+ 8 - 0
target/File/mkdir_p_1.py

@@ -0,0 +1,8 @@
+def mkdir_p(path):
+    try:
+        os.makedirs(path)
+    except OSError as exc:
+        if exc.errno == errno.EEXIST and os.path.isdir(path):
+            pass
+        else:
+            raise  

+ 24 - 0
target/File/project_creator_2.py

@@ -0,0 +1,24 @@
def copySequenceClean(fromFile,projectFolderPath):
    """Copy a FASTA file into the project folder, renaming each record to
    ``>seqN`` and upper-casing all sequence lines.

    Writes:
      - sequence.fasta      cleaned sequence with numbered headers
      - sequence_heads.txt  mapping ``>seqN`` -> original header line
      - sequence_rc.fasta   reverse complement of the cleaned file
    Finally regenerates sequence.fasta from the reverse complement so
    both files go through the same make_rc_record/SeqIO round-trip.
    """
    seq_path = os.path.join(projectFolderPath, "sequence.fasta")
    heads_path = os.path.join(projectFolderPath, "sequence_heads.txt")
    rc_path = os.path.join(projectFolderPath, "sequence_rc.fasta")
    counter = 0
    # Context managers guarantee the three handles are closed even on
    # error (the original leaked all of them if an exception occurred).
    with open(fromFile, "r") as src, open(seq_path, "w+") as seq_out, \
            open(heads_path, "w+") as heads_out:
        for line in src:
            if line.startswith(">"):
                counter += 1
                heads_out.write(">seq" + str(counter) + "\t" + line)
                seq_out.write(">seq" + str(counter) + "\n")
            else:
                seq_out.write(line.upper())
    # Create reverse complement FASTA file, then rebuild the forward
    # file from it.
    records = map(make_rc_record, SeqIO.parse(seq_path, "fasta"))
    SeqIO.write(records, rc_path, "fasta")
    records = map(make_rc_record, SeqIO.parse(rc_path, "fasta"))
    SeqIO.write(records, seq_path, "fasta")

+ 41 - 0
target/File/project_creator_3.py

@@ -0,0 +1,41 @@
def createProject(projectFolder, projectName, inputFasta):
    """Create the full annotation-project folder tree under
    <projectFolder>/<projectName> and copy the input FASTA into it.

    Returns "EXIT" (and creates nothing) when the project already exists.
    """
    # Check if project folder exists
    if(not path.isdir(projectFolder)):
        os.mkdir(projectFolder)
    # Check if given project already exists
    projectFolderPath = os.path.join(projectFolder,projectName)
    if(path.isdir(projectFolderPath)):
        print("Project already exists, process aborted")
        return "EXIT"
    os.mkdir(projectFolderPath)
    # One subdirectory per annotation tool / result stage; a data-driven
    # loop replaces 26 copy-pasted os.mkdir calls.
    subdirs = (
        "tirvish", "tirvish_rc",
        "sinescan", "sinefind", "sinefind_rc",
        "repMasker", "repeatmodel",
        "must",
        "mitetracker", "mitetracker_rc",
        "mitefind", "mitefind_rc",
        "ltrPred", "ltrHarvest",
        "helitronScanner", "helitronScanner_rc",
        "transposonPSI",
        "NCBICDD1000",
        "parsedAnnotations",
        "transposonCandA", "transposonCandB", "transposonCandC",
        "transposonCandD", "transposonCandE", "transposonCandF",
        "finalResults",
    )
    for name in subdirs:
        os.mkdir(os.path.join(projectFolderPath, name))
    # Copy DNA into folder
    copySequenceClean(inputFasta,projectFolderPath)

#createProject("projects", "testProject", "G:/CambridgeGenData/GenSeq/RHIZIPHAGUS_IRR/rir17contigs.fasta")

+ 22 - 0
target/File/setup_2.py

@@ -0,0 +1,22 @@
+def sort_data(num_vincs=6):
+    """
+    DESCRIPTION:
+    Sorts data folders in Ejecta_Simulation_Data by vinc.
+    
+    CALLING SEQUENCE:
+    sort_data(num_vincs=6)
+    
+    KEYWORDS:
+    ## num_vincs: number of velocity increments (default 6; +0-5 km/s)
+    """
+    
+    parent = os.getcwd()
+    folders = sorted(glob.glob(parent + '/Ejecta_Simulation_Data/5000e*'))
+    for i in range(num_vincs):
+        Path(parent + '/Ejecta_Simulation_Data/'+str(i)+'vinc').mkdir(parents=True, exist_ok=True)
+    for folder in folders:
+        vincnum = folder.split('/')[-1].split('_')[2][0]
+        shutil.move(folder, parent + '/Ejecta_Simulation_Data/' + str(vincnum) + 'vinc')
+    
+    
+    

+ 23 - 0
target/File/split_data_in_k_folds_1.py

@@ -0,0 +1,23 @@
+def populate_kfold_directories(data_dir, K_FOLDS):
+
+    alarmed_images = os.listdir(f"{data_dir}/Alarmed")
+    annoyed_images = os.listdir(f"{data_dir}/Annoyed")
+    curious_images = os.listdir(f"{data_dir}/Curious")
+    relaxed_images = os.listdir(f"{data_dir}/Relaxed")
+
+    for i in range(K_FOLDS):
+        validation_range = (i*20, i*20 + 20)
+
+        for j in range(0, 100):
+            if validation_range[0] <= j < validation_range[1]:
+                shutil.copy(f"{data_dir}/Alarmed/{alarmed_images[j]}", f"folds/fold{i}/validation/Alarmed/")
+                shutil.copy(f"{data_dir}/Annoyed/{annoyed_images[j]}", f"folds/fold{i}/validation/Annoyed/")
+                shutil.copy(f"{data_dir}/Curious/{curious_images[j]}", f"folds/fold{i}/validation/Curious/")
+                shutil.copy(f"{data_dir}/Relaxed/{relaxed_images[j]}", f"folds/fold{i}/validation/Relaxed/")
+            else:
+                shutil.copy(f"{data_dir}/Alarmed/{alarmed_images[j]}", f"folds/fold{i}/train/Alarmed/")
+                shutil.copy(f"{data_dir}/Annoyed/{annoyed_images[j]}", f"folds/fold{i}/train/Annoyed/")
+                shutil.copy(f"{data_dir}/Curious/{curious_images[j]}", f"folds/fold{i}/train/Curious/")
+                shutil.copy(f"{data_dir}/Relaxed/{relaxed_images[j]}", f"folds/fold{i}/train/Relaxed/")
+
+

+ 22 - 0
target/File/split_data_in_k_folds_2.py

@@ -0,0 +1,22 @@
+def create_kfold_directories(K_FOLDS):
+
+    try:
+        os.mkdir("folds")
+    except:
+        print("Directory 'folds' already exists")
+
+    for i in range(K_FOLDS):
+        try:
+            os.mkdir(f"folds/fold{i}/")
+            os.mkdir(f"folds/fold{i}/train")
+            os.mkdir(f"folds/fold{i}/validation")
+            os.mkdir(f"folds/fold{i}/train/Alarmed")
+            os.mkdir(f"folds/fold{i}/train/Annoyed")
+            os.mkdir(f"folds/fold{i}/train/Curious")
+            os.mkdir(f"folds/fold{i}/train/Relaxed")
+            os.mkdir(f"folds/fold{i}/validation/Alarmed")
+            os.mkdir(f"folds/fold{i}/validation/Annoyed")
+            os.mkdir(f"folds/fold{i}/validation/Curious")
+            os.mkdir(f"folds/fold{i}/validation/Relaxed")
+        except:
+            print("Can't create directory because it already exists")

+ 71 - 0
target/File/stc_vid2frames_1.py

@@ -0,0 +1,71 @@
def main():
    """Convert the dataset: extract per-frame images from the training
    avi videos and per-frame pixel masks from the npy annotations,
    saving both as image files under the target directories.
    """
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument('--dataroot',
                        default='.',
                        help='Dataset root directory')
    parser.add_argument('--src_vid_path', default='archive/training/videos/',
                        help='Name of folder where `avi` files exist')
    parser.add_argument('--tar_vid_frame_path', default='converted/train',
                        help='Name of folder to save extracted frames.')
    parser.add_argument('--src_npy_path', default='archive/test_pixel_mask/',
                        help='Name of folder where `npy` frame mask exist')
    parser.add_argument('--tar_anno_path', default='converted/pixel_mask',
                        help='Name of folder to save extracted frame annotation')
    parser.add_argument('--extension', default='jpg',
                        help="File extension format for the output image")

    args = parser.parse_args()

    def fresh_dir(path):
        # Create `path`, wiping any previous contents first (the original
        # repeated this try/rmtree/makedirs pattern three times).
        try:
            os.makedirs(path)
        except FileExistsError:
            print(F'{path} already exists, remove whole tree and recompose ...')
            shutil.rmtree(path)
            os.makedirs(path)

    # --- Pass 1: videos -> per-frame images -----------------------------
    src_dir = os.path.join(args.dataroot, args.src_vid_path)
    tar_dir = os.path.join(args.dataroot, args.tar_vid_frame_path)
    fresh_dir(tar_dir)

    # BUG FIX: the original outer `for i, vidname in enumerate(...)` had
    # its `i` clobbered by the inner frame loop; the index was unused, so
    # drop enumerate entirely.
    for vidname in tqdm(os.listdir(src_dir)):
        vid = torchvision.io.read_video(os.path.join(src_dir, vidname), pts_unit='sec')[0]
        target_folder = os.path.join(tar_dir, vidname[:-4])  # strip extension
        fresh_dir(target_folder)

        for i, frame in enumerate(vid):
            frame = (frame / 255.).permute(2, 0, 1)  # HWC2CHW
            torchvision.utils.save_image(frame,
                                         F'{target_folder}/{i:03}.{args.extension}')

    # --- Pass 2: npy pixel masks -> per-frame images --------------------
    src_dir = os.path.join(args.dataroot, args.src_npy_path)
    tar_dir = os.path.join(args.dataroot, args.tar_anno_path)
    fresh_dir(tar_dir)

    for _f in tqdm(os.listdir(src_dir)):
        fn = _f[:-4]  # strip ".npy"
        target_folder = os.path.join(tar_dir, fn)
        os.makedirs(target_folder)
        # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in
        # 1.24 -- the builtin float is the documented replacement.
        px_anno = np.load(F"{src_dir}/{fn}.npy").astype(float)

        for i, px_frame in enumerate(px_anno):
            torchvision.utils.save_image(torch.from_numpy(px_frame).unsqueeze(0), # CHW, 1 channel
                                         F"{target_folder}/{i:03}.{args.extension}")
+
# Script entry point: run the full conversion when invoked directly.
if __name__ == '__main__':
    main()

+ 23 - 0
target/File/test_archive_10.py

@@ -0,0 +1,23 @@
def test_extract_with_symlink_and_change_topdir(self):
    """Extracting with a renamed topdir must preserve symlinks as links."""
    # Symlinks are not supported on Windows; skip there.
    if sys.platform.startswith("win"):
        return
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    a_long_dir = os.path.join(src, "a_long_dir")
    os.mkdir(a_long_dir)
    a_file = os.path.join(a_long_dir, "a_file")
    with open(a_file, "w") as fp:
        fp.write("a_file\n")
    a_link = os.path.join(a_long_dir, "a_link")
    os.symlink("a_file", a_link)
    tar_gz = qibuild.archive.zip_unix(a_long_dir)
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    qibuild.archive.extract(tar_gz, dest, topdir="a_dir")
    ls_r = qibuild.sh.ls_r(dest)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r,
        ['a_dir/a_file', 'a_dir/a_link'])
    dest_link = os.path.join(dest, "a_dir", "a_link")
    self.assertTrue(os.path.islink(dest_link))
    dest_target = os.readlink(dest_link)
    self.assertEqual(dest_target, "a_file")

+ 21 - 0
target/File/test_archive_3.py

@@ -0,0 +1,21 @@
def test_zip_extract(self):
    """zip() followed by extract() must round-trip a directory tree."""
    # Create some files in the temp dir:
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    # An otherwise-empty dir "a" holding two small files "b" and "c".
    a = os.path.join(src, "a")
    os.mkdir(a)
    b = os.path.join(a, "b")
    with open(b, "w") as fp:
        fp.write("b\n")
    c = os.path.join(a, "c")
    with open(c, "w") as fp:
        fp.write("c\n")
    archive = qibuild.archive.zip(a)
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    qibuild.archive.extract(archive, dest)
    ls_r = qibuild.sh.ls_r(dest)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r, ["a/b", "a/c"])

+ 28 - 0
target/File/test_archive_4.py

@@ -0,0 +1,28 @@
def test_zip_extract_ro(self):
    """Extraction must preserve read-only file permissions."""
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    # A dir "a" holding one file "ro" made user-read-only (mode 400).
    a = os.path.join(src, "a")
    os.mkdir(a)
    ro = os.path.join(a, "ro")
    with open(ro, "w") as fp:
        fp.write("ro\n")
    os.chmod(ro, stat.S_IRUSR)
    archive = qibuild.archive.zip(a)
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    qibuild.archive.extract(archive, dest)
    ls_r = qibuild.sh.ls_r(dest)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r, ["a/ro"])
    dest_ro = os.path.join(dest, "a", "ro")
    # Check that the dest is readonly: opening for write must fail.
    error = None
    try:
        open(dest_ro, "w")
    except IOError as e:
        error = e
    self.assertFalse(error is None)
    self.assertEqual(error.errno,  errno.EACCES)

+ 20 - 0
target/File/test_archive_5.py

@@ -0,0 +1,20 @@
def test_zip_extract_ro_dir(self):
    """Zipping and extracting must survive nested read-only directories."""
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    ro1 = os.path.join(src, "ro1")
    os.mkdir(ro1)
    ro2 = os.path.join(ro1, "ro2")
    os.mkdir(ro2)
    a = os.path.join(ro2, "a")
    with open(a, "w") as fp:
        fp.write("a\n")
    # RO dir inside an other RO dir (chmod innermost first so the outer
    # one is still writable while we change it).
    os.chmod(ro2, stat.S_IRUSR | stat.S_IXUSR)
    os.chmod(ro1, stat.S_IRUSR | stat.S_IXUSR)
    archive = qibuild.archive.zip(src)
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    qibuild.archive.extract(archive, dest)
    ls_r = qibuild.sh.ls_r(dest)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r, ["src/ro1/ro2/a"])

+ 21 - 0
target/File/test_archive_6.py

@@ -0,0 +1,21 @@
def test_extract_preserve_executables_from_zip(self):
    """extract_zip must preserve the executable bit from a zip made by
    the external ``zip`` tool (skipped when zip is not installed)."""
    # Renamed from `zip` -- the original shadowed the builtin.
    zip_prog = qibuild.command.find_program("zip")
    if not zip_prog:
        return
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    a_exe = os.path.join(src, "a.exe")
    with open(a_exe, "w") as fp:
        fp.write("a_exe\n")
    st_700 = stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR
    os.chmod(a_exe, st_700)
    qibuild.command.call(["zip", "-r", "src.zip", "src"],
        cwd=self.tmp)
    archive = os.path.join(self.tmp, "src.zip")
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    qibuild.archive.extract_zip(archive, dest)
    dest_exe = os.path.join(dest, "src", "a.exe")
    st_mode = os.stat(dest_exe).st_mode
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(st_mode, 100700)

+ 21 - 0
target/File/test_archive_7.py

@@ -0,0 +1,21 @@
def test_extract_change_topdir(self):
    """extract(..., topdir=...) must rename the archive's top directory
    for both unix (tar.gz) and windows (zip) archive flavours."""
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    a_long_dir = os.path.join(src, "a_long_dir")
    os.mkdir(a_long_dir)
    b = os.path.join(a_long_dir, "b")
    with open(b, "w") as fp:
        fp.write("b\n")
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    tar_gz = qibuild.archive.zip_unix(a_long_dir)
    qibuild.archive.extract(tar_gz, dest, topdir="a")
    a = os.path.join(dest, "a")
    ls_r = qibuild.sh.ls_r(a)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r, ["b"])
    a_zip = qibuild.archive.zip_win(a_long_dir)
    qibuild.archive.extract(a_zip, dest, topdir="aa")
    aa = os.path.join(dest, "aa")
    ls_r = qibuild.sh.ls_r(aa)
    self.assertEqual(ls_r, ["b"])

+ 11 - 0
target/File/test_archive_8.py

@@ -0,0 +1,11 @@
def test_extract_change_topdir_already_correct(self):
    """topdir= matching the archive's existing top directory is a no-op."""
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    a_dir = os.path.join(src, "a")
    os.mkdir(a_dir)
    tar_gz = qibuild.archive.zip_unix(a_dir)
    dest = os.path.join(self.tmp, "dest")
    qibuild.archive.extract(tar_gz, dest, topdir="a")
    ls_r = qibuild.sh.ls_r(dest)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r, ["a/"])

+ 24 - 0
target/File/test_archive_9.py

@@ -0,0 +1,24 @@
def test_extract_with_symlink(self):
    """Extraction must restore symlinks as links, not file copies."""
    # Symlinks are not supported on Windows; skip there.
    if sys.platform.startswith("win"):
        return
    src = os.path.join(self.tmp, "src")
    os.mkdir(src)
    a_dir = os.path.join(src, "a_dir")
    os.mkdir(a_dir)
    a_file = os.path.join(a_dir, "a_file")
    with open(a_file, "w") as fp:
        fp.write("a_file\n")
    a_link = os.path.join(a_dir, "a_link")
    os.symlink("a_file", a_link)
    tar_gz = qibuild.archive.zip_unix(a_dir)
    dest = os.path.join(self.tmp, "dest")
    os.mkdir(dest)
    qibuild.archive.extract(tar_gz, dest)
    ls_r = qibuild.sh.ls_r(dest)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(ls_r,
        ['a_dir/a_file', 'a_dir/a_link'])
    dest_link = os.path.join(dest, "a_dir", "a_link")
    self.assertTrue(os.path.islink(dest_link))
    dest_target = os.readlink(dest_link)
    self.assertEqual(dest_target, "a_file")

+ 9 - 0
target/File/test_tool_2.py

@@ -0,0 +1,9 @@
def get_temp_dir(self):
    """Return the platform's temporary directory.

    Uses tempfile.gettempdir() on every supported OS, which honours
    TMPDIR/TEMP/TMP instead of hard-coding '/tmp' on POSIX as the
    original did.
    """
    if os.name in ('posix', 'nt'):
        import tempfile
        return tempfile.gettempdir()
    raise Exception("Unknown operating system [%s]" % os.name)

+ 6 - 0
target/File/test_tool_3.py

@@ -0,0 +1,6 @@
+def create_file(self, filename):
+    with open(filename, "w+") as file:
+        file.writelines(["line1", "line2", "line3"])
+        file.flush()
+        file.close()
+

+ 33 - 0
target/File/test_tool_4.py

@@ -0,0 +1,33 @@
def test_recursive_copy(self):
    """Exercise AdminTool.recursive_copy from a populated src tree.

    NOTE(review): the post-copy assertions (kept from the original)
    only verify that the *source* tree is intact — nothing inspects
    dest. Confirm recursive_copy's output layout and assert on it.
    """
    base = os.path.join(self.get_temp_dir(), "programy")
    if os.path.exists(base):
        shutil.rmtree(base)
    os.mkdir(base)

    source = os.path.join(base, "src")
    os.mkdir(source)
    os.mkdir(os.path.join(source, "sub"))
    os.mkdir(os.path.join(source, "sub2"))
    target = os.path.join(base, "dest")
    os.mkdir(target)

    for name in ("file1.txt", "file2.txt", "file3.txt"):
        self.create_file(os.path.join(source, name))
    self.create_file(os.path.join(source, "sub", "file4.txt"))

    AdminTool.recursive_copy(source, target)

    for name in ("file1.txt", "file2.txt", "file3.txt"):
        self.assertTrue(os.path.exists(os.path.join(source, name)))
    self.assertTrue(os.path.exists(os.path.join(source, "sub", "file4.txt")))
    self.assertTrue(os.path.exists(os.path.join(source, "sub2")))

    if os.path.exists(base):
        shutil.rmtree(base)

+ 35 - 0
target/File/test_tool_5.py

@@ -0,0 +1,35 @@
def test_delete_folder_contents(self):
    """AdminTool.delete_folder_contents must remove every file below the
    given folder, including files inside sub-directories."""
    base = os.path.join(self.get_temp_dir(), "programy")
    if os.path.exists(base):
        shutil.rmtree(base)
    os.mkdir(base)

    source = os.path.join(base, "src")
    os.mkdir(source)
    os.mkdir(os.path.join(source, "sub"))
    os.mkdir(os.path.join(base, "dest"))

    created = [
        os.path.join(source, "file1.txt"),
        os.path.join(source, "file2.txt"),
        os.path.join(source, "file3.txt"),
        os.path.join(source, "sub", "file4.txt"),
    ]
    for name in created:
        self.create_file(name)
    for name in created:
        self.assertTrue(os.path.exists(name))

    AdminTool.delete_folder_contents(base)

    # Every file must be gone afterwards.
    for name in created:
        self.assertFalse(os.path.exists(name))

    if os.path.exists(base):
        shutil.rmtree(base)

+ 18 - 0
target/File/test_tool_6.py

@@ -0,0 +1,18 @@
def test_make_executable(self):
    """AdminTool.make_executable must run cleanly on an existing file."""
    base = os.path.join(self.get_temp_dir(), "programy")
    if os.path.exists(base):
        shutil.rmtree(base)
    os.mkdir(base)

    target = os.path.join(base, "file1.txt")
    self.create_file(target)
    self.assertTrue(os.path.exists(target))

    AdminTool.make_executable(target)

    # Clean up the scratch tree.
    if os.path.exists(base):
        shutil.rmtree(base)

+ 25 - 0
target/File/test_tool_7.py

@@ -0,0 +1,25 @@
def test_make_all_executable(self):
    """AdminTool.make_all_executable must walk a nested tree cleanly."""
    base = os.path.join(self.get_temp_dir(), "programy")
    if os.path.exists(base):
        shutil.rmtree(base)
    os.mkdir(base)

    source = os.path.join(base, "src")
    os.mkdir(source)
    os.mkdir(os.path.join(source, "sub"))
    os.mkdir(os.path.join(base, "dest"))

    for name in ("file1.txt", "file2.txt", "file3.txt"):
        self.create_file(os.path.join(source, name))
    self.create_file(os.path.join(source, "sub", "file4.txt"))

    AdminTool.make_all_executable(base)

    # Clean up the scratch tree.
    if os.path.exists(base):
        shutil.rmtree(base)

+ 8 - 0
target/File/tutorial_1.py

@@ -0,0 +1,8 @@
+def del_create_analytics_folder():
+    # del the analytics folder including subfolder
+    # mkdir the analytics folder (only mkdir)
+    if os.path.exists('analytics'):
+        shutil.rmtree('analytics')
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+

+ 55 - 0
target/File/tutorial_2.py

@@ -0,0 +1,55 @@
+def course():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    if os.path.exists('analytics/course'):
+        shutil.rmtree('analytics/course')
+    d = {'01':'btech',
+    '11':'mtech',
+    '21':'phd',
+    '12':'msc'}
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/course'):
+            os.mkdir('analytics/course')
+        for row in reader:
+            if len(row)==0:
+                print(1)
+                continue
+            l = list(row.values())
+            head = list(row.keys())
+            stream = str(row['id'][-4:-2]).lower()
+            yr = str(row['id'][:2])
+            if str(row['id'][2:4]) in list(d.keys()):
+                degree = d[str(row['id'][2:4])]
+            else:
+                with open('analytics/course/' + 'misc.csv' , mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/course/' + 'misc.csv')==0:
+                        f_write.writerow(head)
+                    f_write.writerow(l)
+                f.close()
+                continue
+            csv_name = f'{yr}_{stream}_{degree}.csv'
+            p = re.compile(r'\d\d\d\d\D\D\d\d')
+            k = re.fullmatch(p,row['id'])
+            if k:
+                if not os.path.exists('analytics/course/'+ stream):
+                    os.mkdir('analytics/course/'+ stream) 
+                if not os.path.exists('analytics/course/'+ stream + '/' + degree):
+                    os.mkdir('analytics/course/'+ stream + '/' + degree ) 
+                with open('analytics/course/'+ stream + '/' + degree + '/' + csv_name , mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/course/'+ stream + '/' + degree + '/' + csv_name)==0:
+                        f_write.writerow(head)
+                    f_write.writerow(l)
+                f.close()
+            else:
+                with open('analytics/course/' + 'misc.csv' , mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/course/' + 'misc.csv')==0:
+                        f_write.writerow(head)
+                    f_write.writerow(l)
+                f.close()
+    csvfile.close()
+
+

+ 23 - 0
target/File/tutorial_3.py

@@ -0,0 +1,23 @@
+def country():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')    
+    if os.path.exists('analytics/country'):
+        shutil.rmtree('analytics/country')    
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/country'):
+            os.mkdir('analytics/country')
+        for row in reader:
+            l = list(row.values())
+            head = list(row.keys())
+            with open('analytics/country/'+row['country'].lower()+ '.csv', mode = 'a') as f:
+                f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                if os.path.getsize('analytics/country/'+row['country'].lower() + '.csv')==0:
+                  f_write.writerow(head) 
+                f_write.writerow(l)
+            f.close()
+    csvfile.close()
+
+
+
+

+ 34 - 0
target/File/tutorial_4.py

@@ -0,0 +1,34 @@
+def email_domain_extract():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    if os.path.exists('analytics/email'):
+        shutil.rmtree('analytics/email')
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/email'):
+            os.mkdir('analytics/email')
+        for row in reader:
+            l = list(row.values())
+            head = list(row.keys())
+            if '@' in row['email'] and '.' in row['email']:
+                domain = row['email'].split('@')[1].split('.')[0]
+                with open('analytics/email/'+domain+ '.csv', mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/email/'+ domain + '.csv')==0:
+                        f_write.writerow(head) 
+                    f_write.writerow(l)
+                f.close()
+
+            else:
+                with open('analytics/email/'+'misc'+ '.csv', mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/email/'+ domain + '.csv')==0:
+                        f_write.writerow(head) 
+                    f_write.writerow(l)
+                f.close()
+    csvfile.close()
+
+
+
+
+

+ 22 - 0
target/File/tutorial_5.py

@@ -0,0 +1,22 @@
+def gender():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    if os.path.exists('analytics/gender'):
+        shutil.rmtree('analytics/gender')
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/gender'):
+            os.mkdir('analytics/gender')
+        for row in reader:
+            l = list(row.values())
+            head = list(row.keys())
+            gender = row['gender'].lower()
+            with open('analytics/gender/'+gender+ '.csv', mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/gender/'+ gender + '.csv')==0:
+                        f_write.writerow(head) 
+                    f_write.writerow(l)
+            f.close()
+    csvfile.close()
+
+

+ 31 - 0
target/File/tutorial_6.py

@@ -0,0 +1,31 @@
+def dob():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    if os.path.exists('analytics/dob'):
+        shutil.rmtree('analytics/dob')
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/dob'):
+            os.mkdir('analytics/dob')
+        for row in reader:
+            l = list(row.values())
+            head = list(row.keys())
+            x = str(re.sub(r"\D","-",row['dob']))
+            yr = int(x.split('-')[-1])
+            k = int(yr)%10
+            if k>4:
+                name = 'bday_' + str(yr - k + 5) + '_' + str(yr - k + 9)
+            else:
+                name = 'bday_' + str(yr - k ) + '_' + str(yr - k + 4)
+            if yr > 2014:
+                name = 'bday_2015_2020'
+            with open('analytics/dob/'+name+ '.csv', mode = 'a') as f:
+                    f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                    if os.path.getsize('analytics/dob/'+name+ '.csv')==0:
+                        f_write.writerow(head) 
+                    f_write.writerow(l)
+            f.close()
+        
+
+
+

+ 21 - 0
target/File/tutorial_7.py

@@ -0,0 +1,21 @@
+def state():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    if os.path.exists('analytics/state'):
+        shutil.rmtree('analytics/state')
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/state'):
+            os.mkdir('analytics/state')
+        for row in reader:
+            l = list(row.values())
+            head = list(row.keys())
+            with open('analytics/state/'+row['state'].lower()+ '.csv', mode = 'a') as f:
+                f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                if os.path.getsize('analytics/state/'+row['state'].lower() + '.csv')==0:
+                  f_write.writerow(head) 
+                f_write.writerow(l)
+            f.close()
+    csvfile.close()
+
+

+ 22 - 0
target/File/tutorial_8.py

@@ -0,0 +1,22 @@
+def blood_group():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    if os.path.exists('analytics/blood_group'):
+        shutil.rmtree('analytics/blood_group')
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)
+        if not os.path.exists('analytics/blood_group'):
+            os.mkdir('analytics/blood_group')
+        for row in reader:
+            l = list(row.values())
+            head = list(row.keys())
+            with open('analytics/blood_group/'+row['blood_group']+ '.csv', mode = 'a') as f:
+                f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+                if os.path.getsize('analytics/blood_group/'+row['blood_group'] + '.csv')==0:
+                    f_write.writerow(head) 
+                f_write.writerow(l)
+            f.close()
+    csvfile.close()
+
+
+# Create the new file here and also sort it in this function only.

+ 40 - 0
target/File/tutorial_9.py

@@ -0,0 +1,40 @@
+def new_file_sort():
+    if not os.path.exists('analytics'):
+        os.mkdir('analytics')
+    new = []
+    head = []
+    with open('studentinfo_cs384.csv', newline='') as csvfile:
+        reader = csv.DictReader(csvfile)       
+        for row in reader:
+            head = list(row.keys())
+            del head[1]
+            head.insert(1,'first_name')
+            head.insert(2,'last_name')
+            k = list(row.values())
+            del k[1]
+            k.insert(1,row['full_name'].split()[0])
+            k.insert(2,' '.join(row['full_name'].split()[1:]))
+            new.append(k)
+    csvfile.close()
+    with open('analytics/studentinfo_cs384_names_split.csv', newline='',mode='w') as f:
+        f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+        f_write.writerow(head)
+        for i in new:
+            f_write.writerow(i)
+    f.close()
+    #sorting
+    dic = {}
+    for i in new:
+        dic[i[1]]='#$%^&*'.join(i)
+    new = []
+    with open('analytics/studentinfo_cs384_names_split_sorted_first_name.csv', mode = 'w') as f:
+        print
+    f.close()
+    for i in sorted(dic.items()):
+        new.append(i[1].split('#$%^&*'))
+    with open('analytics/studentinfo_cs384_names_split_sorted_first_name.csv', mode = 'a') as f:
+        f_write = csv.writer(f, delimiter=',',lineterminator='\r')
+        f_write.writerow(head)
+        for i in new:
+            f_write.writerow(i)
+    f.close()

+ 8 - 0
target/File/utils_1.py

@@ -0,0 +1,8 @@
+def save_parameters(args, run_name):
+    with open(os.path.join(args.log_path, run_name)+'/parameters.txt', 'w') as f:
+        f.write('num_blocks {}, lr {}, beta1 {} beta2 {}, batch_size {} gamma  {} scheduler_step {}'.format(
+            args.num_blocks, args.lr, args.beta1, args.beta2, args.batch_size, 
+            args.gamma, args.scheduler_step
+        ))
+
+

+ 20 - 0
target/File/utils_2.py

@@ -0,0 +1,20 @@
def prepare_directories(args, run_name):
    """Validate the data directory and set up the logging tree for a run.

    Raises Exception when args.data_path does not exist, or when
    args.pretrained_path is set but lacks a 'states' subdirectory.
    For a fresh run (no pretrained_path) creates
    <log_path>/<run_name>/{samples,states,tensorboard_logs}.
    """
    if not os.path.isdir(args.data_path):
        raise Exception("Invalid data path. No such directory")

    if not os.path.isdir(args.log_path):
        os.makedirs(args.log_path)

    if args.pretrained_path:
        states_dir = os.path.join(args.pretrained_path, 'states')
        if not (os.path.isdir(args.pretrained_path) and os.path.isdir(states_dir)):
            raise Exception("Invalid path. No such directory with pretrained model")
    else:
        exp_path = os.path.join(args.log_path, run_name)
        os.makedirs(exp_path)
        for sub in ('samples', 'states', 'tensorboard_logs'):
            os.makedirs(os.path.join(exp_path, sub))
+
+

+ 9 - 0
target/File/utils_3.py

@@ -0,0 +1,9 @@
def build_model(args):
    """Construct a DeepGL model, optionally restoring pretrained weights.

    When args.pretrained_path is set, loads the state dict saved at
    <pretrained_path>/samples/<load_step>.pt.
    # NOTE(review): weights are loaded from 'samples' while
    # prepare_directories also creates a 'states' dir — confirm intended.
    """
    model = DeepGL(args.num_blocks)
    if args.pretrained_path:
        checkpoint = os.path.join(
            args.pretrained_path, 'samples', str(args.load_step) + '.pt')
        model.load_state_dict(torch.load(checkpoint))
    return model
+
+

+ 5 - 0
target/File/utils_4.py

@@ -0,0 +1,5 @@
def prepare_logger(path):
    """Ensure *path* exists as a directory, then return a Logger bound to it."""
    os.makedirs(path, exist_ok=True)
    return Logger(path)

+ 6 - 0
target/File/visualize_2_1.py

@@ -0,0 +1,6 @@
def load_examples(file_name):
    """Read *file_name* and return its lines with trailing newlines removed."""
    with open(file_name) as handle:
        return [line.replace('\n', '') for line in handle]
+
+
+    

+ 33 - 0
target/Hash/EncrypC_12.py

@@ -0,0 +1,33 @@
def encrypt_execute(self):
    """GUI callback: run the configured encryption and report progress.

    Disables the controls, streams percentage updates from
    EncryptionTool.encrypt() into the status label, and honours a
    user-requested cancellation via self.should_cancel.  Exception
    messages are shown in the status label instead of propagating.
    """
    self.freeze_controls()

    try:
        # Build the cipher from the current form fields (file, key, salt).
        self._cipher = EncryptionTool(
            self._file_url.get(), self._secret_key.get(), self._salt.get()
        )
        for percentage in self._cipher.encrypt():
            if self.should_cancel:
                break
            percentage = "{0:.2f}%".format(percentage)
            self._status.set(percentage)
            # Force a repaint so progress is visible during the loop.
            self.status_label.update()

        if self.should_cancel:
            # abort() removes the partially written output file.
            self._cipher.abort()
            self._status.set("Cancellation Successful !!")
            messagebox.showinfo("EncrypC", "Cancellation Successful !!")
            self._cipher = None
            self.should_cancel = False
            self.unfreeze_controls()
            return

        self._cipher = None
        self.should_cancel = False
        self._status.set("File Encryption Successful !!")
        messagebox.showinfo("EncrypC", "File Encryption Successful !!")
    except Exception as e:
        # Surface the failure in the status bar; controls re-enabled below.
        self._status.set(e)

    self.unfreeze_controls()
+

+ 33 - 0
target/Hash/EncrypC_14.py

@@ -0,0 +1,33 @@
def decrypt_execute(self):
    """GUI callback: run the configured decryption and report progress.

    Mirrors encrypt_execute(): freezes the controls, streams percentage
    updates from EncryptionTool.decrypt() into the status label, and
    honours cancellation via self.should_cancel.  Exception messages go
    to the status label instead of propagating.
    """
    self.freeze_controls()

    try:
        # Build the cipher from the current form fields (file, key, salt).
        self._cipher = EncryptionTool(
            self._file_url.get(), self._secret_key.get(), self._salt.get()
        )
        for percentage in self._cipher.decrypt():
            if self.should_cancel:
                break
            percentage = "{0:.2f}%".format(percentage)
            self._status.set(percentage)
            # Force a repaint so progress is visible during the loop.
            self.status_label.update()

        if self.should_cancel:
            # abort() removes the partially written output file.
            self._cipher.abort()
            self._status.set("Cancellation Successful !!")
            messagebox.showinfo("EncrypC", "Cancellation Successful !!")
            self._cipher = None
            self.should_cancel = False
            self.unfreeze_controls()
            return

        self._cipher = None
        self.should_cancel = False
        self._status.set("File Decryption Successful !!")
        messagebox.showinfo("EncrypC", "File Decryption Successful !!")
    except Exception as e:
        # Surface the failure in the status bar; controls re-enabled below.
        self._status.set(e)

    self.unfreeze_controls()
+

+ 27 - 0
target/Hash/EncrypC_2.py

@@ -0,0 +1,27 @@
def encrypt(self):
    """Encrypt self.user_file into self.encrypt_output_file chunk by chunk.

    Generator: yields the completion percentage after each chunk so the
    caller can drive a progress display.  Uses AES-CFB with the key/IV
    prepared in self.hashed_key_salt.
    """
    # create a cipher object
    cipher_object = AES.new(
        self.hashed_key_salt["key"], AES.MODE_CFB, self.hashed_key_salt["salt"]
    )

    self.abort()  # if the output file already exists, remove it first

    done_chunks = 0
    # Context managers guarantee both handles are closed even when the
    # consumer abandons the generator mid-way; the original leaked both
    # files in that case.
    with open(self.user_file, "rb") as input_file, \
            open(self.encrypt_output_file, "ab") as output_file:
        for piece in self.read_in_chunks(input_file, self.chunk_size):
            output_file.write(cipher_object.encrypt(piece))
            done_chunks += 1
            yield done_chunks / self.total_chunks * 100

    # clean up the cipher object
    del cipher_object
+

+ 27 - 0
target/Hash/EncrypC_3.py

@@ -0,0 +1,27 @@
def decrypt(self):
    """Decrypt self.user_file into self.decrypt_output_file chunk by chunk.

    Exact mirror of encrypt(): a generator yielding the completion
    percentage after each chunk.  Output is opened with "xb" so an
    existing file is never overwritten (abort() removed it just before).
    """
    cipher_object = AES.new(
        self.hashed_key_salt["key"], AES.MODE_CFB, self.hashed_key_salt["salt"]
    )

    self.abort()  # if the output file already exists, remove it first

    done_chunks = 0
    # Context managers close both handles even if the consumer abandons
    # the generator mid-way; the original leaked both files in that case.
    with open(self.user_file, "rb") as input_file, \
            open(self.decrypt_output_file, "xb") as output_file:
        # NOTE(review): unlike encrypt(), no chunk_size is passed here —
        # confirm read_in_chunks' default matches self.chunk_size.
        for piece in self.read_in_chunks(input_file):
            output_file.write(cipher_object.decrypt(piece))
            done_chunks += 1
            yield done_chunks / self.total_chunks * 100

    # clean up the cipher object
    del cipher_object
+

+ 31 - 0
target/Hash/EncrypC_5.py

@@ -0,0 +1,31 @@
def hash_key_salt(self):
    """Derive self.hashed_key_salt['key'] and ['salt'] from the raw inputs.

    Hashes self.user_key / self.user_salt with self.hash_type and keeps
    the first 32 hex characters (256 bits) as the key and the first 16
    hex characters (128 bits) as the salt, both stored as UTF-8 bytes.
    """
    # (dict field, source material, number of hex characters kept)
    for field, material, keep in (
        ("key", self.user_key, 32),
        ("salt", self.user_salt, 16),
    ):
        hasher = hashlib.new(self.hash_type)
        hasher.update(material)
        self.hashed_key_salt[field] = bytes(hasher.hexdigest()[:keep], "utf-8")
+
+

+ 21 - 0
target/Hash/EncryptionDecryption_1.py

@@ -0,0 +1,21 @@
def encrypt(self, key, filename):
    """AES-CBC encrypt *filename* into '(Secured)<name>' beside it.

    Output layout: 16-byte zero-padded plaintext size, then the random
    IV, then the ciphertext (last chunk space-padded to a 16-byte
    multiple).  Returns the output path.
    """
    chunksize = 128 * 1024
    outFile = os.path.join(os.path.dirname(filename),
                           "(Secured)" + os.path.basename(filename))
    filesize = str(os.path.getsize(filename)).zfill(16)
    IV = Random.new().read(AES.block_size)
    print(IV, len(IV))
    encryptor = AES.new(key, AES.MODE_CBC, IV)

    with open(filename, "rb") as infile, open(outFile, "wb") as outfile:
        outfile.write(filesize.encode('utf-8'))
        outfile.write(IV)
        for chunk in iter(lambda: infile.read(chunksize), b""):
            if len(chunk) % 16 != 0:
                # Space-pad the final chunk up to the AES block size.
                chunk += b' ' * (16 - (len(chunk) % 16))
            outfile.write(encryptor.encrypt(chunk))
    return outFile
+

+ 18 - 0
target/Hash/EncryptionDecryption_2.py

@@ -0,0 +1,18 @@
def decrypt(self, key, filename):
    """Reverse of encrypt(): restore the original file beside *filename*.

    Reads the 16-byte size header and the IV, AES-CBC decrypts the rest,
    then truncates the output to the recorded plaintext size to drop the
    space padding added at encryption time.  Returns the output path.
    """
    outFile = os.path.join(os.path.dirname(filename),
                           os.path.basename(filename).replace("(Secured)", ""))
    print(outFile)
    chunksize = 128 * 1024
    with open(filename, "rb") as infile:
        filesize = infile.read(16)
        IV = infile.read(16)
        decryptor = AES.new(key, AES.MODE_CBC, IV)

        with open(outFile, "wb") as outfile:
            for chunk in iter(lambda: infile.read(chunksize), b""):
                outfile.write(decryptor.decrypt(chunk))
            outfile.truncate(int(filesize))
    return outFile

+ 8 - 0
target/Hash/Encryption_And_Hashing/Task2_6.py

@@ -0,0 +1,8 @@
def generateKey(size):
    """Return *size* cryptographically random bytes, printing the timing."""
    start = timer()
    key = get_random_bytes(size)
    elapsed = timer() - start
    print("Time Taken to generate Key is ", elapsed)
    return key
+    
+

+ 13 - 0
target/Hash/Encryption_And_Hashing/Task2_7.py

@@ -0,0 +1,13 @@
def encryptInCTR(key, data, fileNameToEncrypt):
    """AES-CTR encrypt *data* (str) and write base64 ciphertext to a file.

    Prints the total and per-byte encryption time (the original labelled
    the per-byte line "Decrypt" — fixed) and returns the base64-encoded
    nonce required by decryptInCTR.
    """
    data = bytes(data, 'utf-8')
    cipher = AES.new(key, AES.MODE_CTR)
    start = timer()
    ct_bytes = cipher.encrypt(data)
    end = timer()
    print("Time Taken to Encrypt is ", (end - start))
    print("Time Taken to Encrypt per byte is ", (end - start) / len(data))
    nonce = b64encode(cipher.nonce).decode('utf-8')
    ct = b64encode(ct_bytes).decode('utf-8')
    writeFileInBytes(bytes(ct, "utf-8"), fileNameToEncrypt)
    return nonce
+

+ 15 - 0
target/Hash/Encryption_And_Hashing/Task2_8.py

@@ -0,0 +1,15 @@
def decryptInCTR(key, fileNameToDecrypt, nonce, fileNameToSavePT):
    """AES-CTR decrypt a base64 ciphertext file and save the plaintext.

    *nonce* is the base64 nonce returned by encryptInCTR.  Prints the
    timings and returns the decoded plaintext string.
    """
    ciphertext = b64decode(getDataInBytes(fileNameToDecrypt))
    cipher = AES.new(key, AES.MODE_CTR, nonce=b64decode(nonce))
    start = timer()
    plain_bytes = cipher.decrypt(ciphertext)
    elapsed = timer() - start
    print("Time Taken to Decrypt is ", elapsed)
    print("Time Taken to Decrypt per byte is ", elapsed / len(plain_bytes))
    plaintext = plain_bytes.decode("utf-8")
    writeFile(plaintext, fileNameToSavePT)
    return plaintext
+
+
+

+ 15 - 0
target/Hash/Encryption_And_Hashing/Task4_6.py

@@ -0,0 +1,15 @@
def generateHashUsingSHA256(fileName):
    """SHA-256 hash the text content of *fileName* and print timings.

    Returns the hex digest; the original computed it and dropped it.
    """
    print("SHA_256")
    data = bytes(getData(fileName), "utf-8")
    start = timer()
    h = SHA256.new()
    h.update(data)
    # hexdigest: human-readable form of h.digest()
    digest = h.hexdigest()
    end = timer()
    print("Time Taken to generate Hash is ", (end - start))
    print("Time Taken to Hash per byte is ", (end - start) / len(data))
    return digest
+    
+    

+ 14 - 0
target/Hash/Encryption_And_Hashing/Task4_7.py

@@ -0,0 +1,14 @@
def generateHashUsingSHA512(fileName):
    """SHA-512 hash the text content of *fileName* and print timings.

    Returns the hex digest; the original computed it and dropped it.
    """
    print("SHA_512")
    data = bytes(getData(fileName), "utf-8")
    start = timer()
    h = SHA512.new()
    h.update(data)
    # hexdigest: human-readable form of h.digest()
    digest = h.hexdigest()
    end = timer()
    print("Time Taken to generate Hash is ", (end - start))
    print("Time Taken to Hash per byte is ", (end - start) / len(data))
    return digest
+    

+ 13 - 0
target/Hash/Encryption_And_Hashing/Task4_8.py

@@ -0,0 +1,13 @@
def generateHashUsingSHA3_256(fileName):
    """SHA3-256 hash the text content of *fileName* and print timings.

    Returns the hex digest; the original computed it and dropped it.
    """
    print("SHA3_256")
    data = bytes(getData(fileName), "utf-8")
    start = timer()
    h = SHA3_256.new()
    h.update(data)
    # hexdigest: human-readable form of h.digest()
    digest = h.hexdigest()
    end = timer()
    print("Time Taken to generate Hash is ", (end - start))
    print("Time Taken to Hash per byte is ", (end - start) / len(data))
    return digest

+ 39 - 0
target/Hash/Encryption_And_Hashing/Task5_3.py

@@ -0,0 +1,39 @@
def rsa(message, size):
    """RSA round-trip benchmark: generate a *size*-bit key, OAEP-encrypt
    *message* in chunks, decrypt, and print the timings.

    Fixes the original chunking, which produced a single empty chunk for
    any message shorter than one chunk (so short messages were lost and
    reported as "Incorrect").  Also stops reusing the *size* parameter
    for the chunk size.
    """
    start = timer()
    key = RSA.generate(size)
    key_gen_time = timer() - start

    cipher = PKCS1_OAEP.new(key.publickey())
    # 214 bytes: maximum OAEP payload — presumably sized for a 2048-bit
    # key with the default SHA-1 OAEP hash; TODO confirm for other sizes.
    chunk_size = 214
    chunks = [message[i:i + chunk_size]
              for i in range(0, len(message), chunk_size)] or [""]

    ciphers = []
    start = timer()
    for chunk in chunks:
        ciphers.append(cipher.encrypt(chunk.encode("utf-8")))
    encrypt_time = timer() - start

    # decryption
    cipher = PKCS1_OAEP.new(key)
    buffer = ""
    start = timer()
    for ciphertext in ciphers:
        buffer += cipher.decrypt(ciphertext).decode("utf-8")
    decrypt_time = timer() - start

    if buffer == message:
        print("successfull encryption and decryption")
    else:
        print("Incorrect encryption/decryption")
    print("time taken for key gen ", (key_gen_time))
    print("time taken to encrypt ", (encrypt_time))
    print("Time Taken to Encrypt per byte is ", ((encrypt_time) / len(message)))
    print("time taken to Decrypt ", (decrypt_time))
    print("Time Taken to Decrypt per byte is ", ((decrypt_time) / len(message)))

+ 9 - 0
target/Hash/Encryption_And_Hashing/Task7_3.py

@@ -0,0 +1,9 @@
def generatePrivateKey(size):
    """Generate a DSA private key of *size* bits, printing the timing."""
    start = timer()
    private_key = dsa.generate_private_key(key_size=size, backend=default_backend())
    elapsed = timer() - start
    print("Time Taken to generate key is ", elapsed)
    return private_key
+
+
+

+ 10 - 0
target/Hash/Encryption_And_Hashing/Task7_4.py

@@ -0,0 +1,10 @@
def sign(private_key, fileName):
    """DSA-sign the contents of *fileName* with SHA-256; print timings.

    Returns the signature bytes.
    """
    payload = getData(fileName).encode("utf-8")
    start = timer()
    signature = private_key.sign(payload, hashes.SHA256())
    elapsed = timer() - start
    print("Time Taken to generate signature is ", elapsed)
    print("Time Taken per byte to sign is ", elapsed / len(payload))
    return signature
+

+ 11 - 0
target/Hash/Encryption_And_Hashing/Task7_5.py

@@ -0,0 +1,11 @@
def verify(signature, private_key, fileName):
    """Verify *signature* over the contents of *fileName*.

    Derives the public key from *private_key*; verify() raises when the
    signature does not match.  Prints the verification timing, which the
    original measured (start/end) but never reported — now reported in
    the same style as sign().
    """
    data = getData(fileName)
    data = data.encode("utf-8")
    start = timer()
    public_key = private_key.public_key()
    public_key.verify(
        signature,
        data,
        hashes.SHA256()
    )
    end = timer()
    print("Time Taken to verify is ", (end - start))
    print("Time Taken per byte to verify is ", (end - start) / len(data))

+ 13 - 0
target/Hash/Task1_7.py

@@ -0,0 +1,13 @@
def encryptInCBC(key, data, fileNameToEncrypt):
    """AES-CBC encrypt *data* (str) and write base64 ciphertext to a file.

    Prints timing info and returns the base64-encoded IV required by
    decryptInCBC.
    """
    raw = bytes(data, 'utf-8')
    cipher = AES.new(key, AES.MODE_CBC)
    start = timer()
    ct_bytes = cipher.encrypt(pad(raw, AES.block_size))
    end = timer()
    print("Time Taken to Encrypt is " + str(end - start))
    print("Time Taken to Encrypt per byte is ", (end - start) / len(raw))
    encoded = b64encode(ct_bytes).decode('utf-8')
    writeFileInBytes(bytes(encoded, "utf-8"), fileNameToEncrypt)
    return b64encode(cipher.iv).decode('utf-8')
+

+ 16 - 0
target/Hash/Task1_8.py

@@ -0,0 +1,16 @@
def decryptInCBC(key, fileNameToDecrypt, iv, fileNameToSavePT):
    """AES-CBC decrypt a base64 ciphertext file and save the plaintext.

    *iv* is the base64 IV returned by encryptInCBC.  Prints timings and
    returns the unpadded plaintext string.
    """
    ciphertext = b64decode(getDataInBytes(fileNameToDecrypt))
    cipher = AES.new(key, AES.MODE_CBC, b64decode(iv))
    start = timer()
    plain = unpad(cipher.decrypt(ciphertext), AES.block_size)
    end = timer()
    print("Time Taken to Decrypt is ", end - start)
    print("Time Taken to Decrypt per byte is ", (end - start) / len(plain))
    text = plain.decode("utf-8")
    writeFile(text, fileNameToSavePT)
    return text
+
+
+
+

+ 13 - 0
target/Hash/base64_2.py

@@ -0,0 +1,13 @@
def encryptFile():
    """Prompt for a filename and base64-'encrypt' its contents in place.

    Note: base64 is an encoding, not encryption — this only obscures the
    text.  Fixes the original, which wrote str(encoded) (the literal
    "b'...'" repr, contradicting its own comment about .decode()) and
    left both file handles open.
    """
    myFile = input("enter file to encrypt: ")
    with open(myFile, "r") as fh:
        contents = fh.read().encode()
    encoded = base64.b64encode(contents)
    with open(myFile, "w") as fh:
        # the .decode() converts the bytes to str, taking off the b'...'
        fh.write(encoded.decode())
    print("File is now encrypted... and the contents is unreadable")
+
+
+

+ 16 - 0
target/Hash/base64_3.py

@@ -0,0 +1,16 @@
def decryptMessage():
    """Ask for the decryption key; if it matches, base64-decode a message.

    The expected key is stored base64-encoded in *pwd*; the user's input
    is compared against the decoded value.  Returns to menu() either way.
    """
    pwd = "N3VIQUJmZ2pyNDVkZDRvMzNkZmd0NzBkZzlLOWRmcjJ0NWhCdmRm"
    key = base64.b64decode(pwd)  # the decoded version of this is the key.
    value = input("Enter the decryption key: ").encode()
    if value != key:
        print("Decryption key is wrong.")
        menu()
        return
    time.sleep(1)
    message = input("Enter the message to decode: ")
    decoded = base64.b64decode(message)
    print(decoded)
    menu()
+
+

+ 7 - 0
target/Hash/base64_4.py

@@ -0,0 +1,7 @@
def encrypt():
    """Prompt for a message, print its base64 encoding, return to menu()."""
    encoded = base64.b64encode(input("Enter a message: ").encode())
    print(encoded.decode())
    menu()
+
+

+ 3 - 0
target/Hash/base64_5.py

@@ -0,0 +1,3 @@
def hashing(password):
    """Print the MD5 hex digest of *password* (MD5 is weak — display only)."""
    digest = hashlib.md5(password.encode()).hexdigest()
    print("your hashed password is:", digest, "\n")

+ 74 - 0
target/Hash/biometry_hash_1.py

@@ -0,0 +1,74 @@
def enroll():
    """Enroll a new fingerprint on the sensor and return a 32-char hash.

    Initializes the PyFingerprint sensor on /dev/ttyUSB0, reads the
    finger (twice for a new template), stores the template — or reuses
    an existing match — and returns the first 32 hex characters of the
    SHA-256 of the template characteristics.  Exits the process on
    sensor failure.
    """
    ## Tries to initialize the sensor
    try:
        f = PyFingerprint('/dev/ttyUSB0', 57600, 0xFFFFFFFF, 0x00000000)

        if ( f.verifyPassword() == False ):
            raise ValueError('The given fingerprint sensor password is wrong!')

    except Exception as e:
        print('The fingerprint sensor could not be initialized!')
        print('Exception message: ' + str(e))
        exit(1)


    ## Gets some sensor information
    print('Currently stored templates: ' + str(f.getTemplateCount()))

    ## Tries to enroll new finger
    try:
        print('Waiting for finger...')

        ## Wait that finger is read
        while ( f.readImage() == False ):
            pass

        ## Converts read image to characteristics and stores it in charbuffer 1
        f.convertImage(0x01)

        ## Checks if finger is already enrolled
        result = f.searchTemplate()
        positionNumber = result[0]


        if ( positionNumber >= 0 ):
            # Finger already known: hash the stored template instead of
            # enrolling a duplicate.
            f.loadTemplate(positionNumber, 0x01)
            # NOTE(review): hashlib.sha256 requires bytes on Python 3 —
            # passing str works only on Python 2; confirm the interpreter.
            characterics = str(f.downloadCharacteristics(0x01))
            passhashes = hashlib.sha256(characterics).hexdigest()
            passhash = passhashes[0:32]
            print('Template already exists at position #' + str(positionNumber))
            return passhash

        time.sleep(2)
        ## Wait that finger is read again
        while ( f.readImage() == False ):
            pass
        ## Converts read image to characteristics and stores it in charbuffer 2
        f.convertImage(0x02)

        ## Compares the charbuffers and creates a template
        f.createTemplate()

        ## Gets new position number (the counting starts at 0, so we do not need to increment)
        positionNumber = f.getTemplateCount()

        ## Saves template at new position number
        if ( f.storeTemplate(positionNumber) == True ):
            ## Hashes characteristics of template
            characterics = str(f.downloadCharacteristics(0x01))
            passhashes = hashlib.sha256(characterics).hexdigest()
            passhash = passhashes[0:32]
            print('SHA-2 hash of template: ' + passhash)
            return passhash

    except Exception as e:
        # NOTE(review): exits silently, hiding the failure cause from
        # the user (and returns no hash).
        exit(1)
        #print('SHA-2 hash of template: ' + hashlib.sha256(characterics).hexdigest())

+ 3 - 0
target/Hash/biometry_hash_10.py

@@ -0,0 +1,3 @@
def getKey(password):
    """Return the 32-byte SHA-256 digest of *password* (AES key material)."""
    return SHA256.new(password).digest()

+ 21 - 0
target/Hash/biometry_hash_12.py

@@ -0,0 +1,21 @@
def AESmenu(EncDec):
    """Interactive AES menu (Python 2: uses raw_input).

    *EncDec* is the key material forwarded to encrypt()/decrypt().
    Prompts for (E)ncrypt/(D)ecrypt and a filename, re-prompting until
    file_check() accepts it (0 is its failure value).
    """
    choice = raw_input("Would you like to (E)ncrypt or (D)ecrypt?: ")
    f_in = raw_input("Insert the filename with extension: ")
    fileblob = file_check(f_in)
    # Keep asking until file_check stops returning its 0 failure value.
    while fileblob == 0:
        f_in = raw_input("insert the filename with extensions")
        fileblob = file_check(f_in)
    print( f_in  )  
    if choice == 'E':
            encrypt(EncDec, f_in)
            print ("Done.")
    elif choice == 'D':
            decrypt(EncDec, f_in)
            print( "Done.")
    else:
            print ("No Option selected, closing...")
+
+        

+ 35 - 0
target/Hash/biometry_hash_2.py

@@ -0,0 +1,35 @@
def index():
    """Print the sensor's template index table for a user-chosen page.

    Initializes the PyFingerprint sensor on /dev/ttyUSB0, asks for an
    index page (0-3) and prints, for each slot on that page, whether a
    template is stored there.  Exits the process on any failure.
    """
    ## Tries to initialize the sensor
    try:
        f = PyFingerprint('/dev/ttyUSB0', 57600, 0xFFFFFFFF, 0x00000000)

        if ( f.verifyPassword() == False ):
            raise ValueError('The given fingerprint sensor password is wrong!')

    except Exception as e:
        print('The fingerprint sensor could not be initialized!')
        print('Exception message: ' + str(e))
        exit(1)

    ## Gets some sensor information
    print('Currently stored templates: ' + str(f.getTemplateCount()))

    ## Tries to show a template index table page
    try:
        # NOTE(review): raw_input is Python 2 — input() on Python 3.
        page = raw_input('Please enter the index page (0, 1, 2, 3) you want to see: ')
        page = int(page)

        tableIndex = f.getTemplateIndex(page)

        for i in range(0, len(tableIndex)):
            print('Template at position #' + str(i) + ' is used: ' + str(tableIndex[i]))

    except Exception as e:
        print('Operation failed!')
        print('Exception message: ' + str(e))
        exit(1)

+ 39 - 0
target/Hash/biometry_hash_3.py

@@ -0,0 +1,39 @@
def fp_download():
    """Read a fingerprint image from the sensor and save it as a BMP.

    Initializes the PyFingerprint sensor on /dev/ttyUSB0, waits for a
    finger, then downloads the raw image to <tempdir>/fingerprint.bmp.
    Exits the process on any failure.
    """
    ## Tries to initialize the sensor
    try:
        f = PyFingerprint('/dev/ttyUSB0', 57600, 0xFFFFFFFF, 0x00000000)

        if ( f.verifyPassword() == False ):
            raise ValueError('The given fingerprint sensor password is wrong!')

    except Exception as e:
        print('The fingerprint sensor could not be initialized!')
        print('Exception message: ' + str(e))
        exit(1)

    ## Gets some sensor information
    print('Currently stored templates: ' + str(f.getTemplateCount()))

    ## Tries to read image and download it
    try:
        print('Waiting for finger...')

        ## Wait that finger is read
        while ( f.readImage() == False ):
            pass

        print('Downloading image (this take a while)...')

        imageDestination =  tempfile.gettempdir() + '/fingerprint.bmp'
        f.downloadImage(imageDestination)

        print('The image was saved to "' + imageDestination + '".')

    except Exception as e:
        print('Operation failed!')
        print('Exception message: ' + str(e))
        exit(1)

+ 69 - 0
target/Hash/biometry_hash_5.py

@@ -0,0 +1,69 @@
def fp_search():
    """Search the sensor for the current finger and print its template hash.

    Based on PyFingerprint example code,
    Copyright (C) 2015 Bastian Raschke <bastian.raschke@posteo.de>,
    all rights reserved.

    Waits for a finger, searches the stored templates, prints position
    and accuracy on a match (exits 0 when none), then downloads the
    matched template's characteristics and prints their SHA-256 hash.
    """

    ## Tries to initialize the sensor
    try:
        f = PyFingerprint('/dev/ttyUSB0', 57600, 0xFFFFFFFF, 0x00000000)

        if ( f.verifyPassword() == False ):
            raise ValueError('The given fingerprint sensor password is wrong!')

    except Exception as e:
        print('The fingerprint sensor could not be initialized!')
        print('Exception message: ' + str(e))
        exit(1)

    ## Gets some sensor information
    print('Currently stored templates: ' + str(f.getTemplateCount()))

    ## Tries to search the finger and calculate hash
    try:
        print('Waiting for finger...')

        ## Wait that finger is read
        while ( f.readImage() == False ):
            pass

        ## Converts read image to characteristics and stores it in charbuffer 1
        f.convertImage(0x01)

        ## Searchs template
        result = f.searchTemplate()

        positionNumber = result[0]
        accuracyScore = result[1]

        if ( positionNumber == -1 ):
            print('No match found!')
            exit(0)
        else:
            print('Found template at position #' + str(positionNumber))
            print('The accuracy score is: ' + str(accuracyScore))

        ## OPTIONAL stuff
        ##

        ## Loads the found template to charbuffer 1
        f.loadTemplate(positionNumber, 0x01)

        ## Downloads the characteristics of template loaded in charbuffer 1
        characterics = str(f.downloadCharacteristics(0x01))

        ## Hashes characteristics of template
        # NOTE(review): hashlib.sha256 requires bytes on Python 3 —
        # passing str works only on Python 2; confirm the interpreter.
        print('SHA-2 hash of template: ' + hashlib.sha256(characterics).hexdigest())

    except Exception as e:
        print('Operation failed!')
        print('Exception message: ' + str(e))
        exit(1)

+ 29 - 0
target/Hash/biometry_hash_8.py

@@ -0,0 +1,29 @@
def encrypt(key, filename):
    """AES-CBC encrypt *filename* into '(encrypted)<filename>' (Python 2).

    Output layout: 16-byte zero-padded plaintext size, then the IV, then
    the ciphertext; the final short chunk is space-padded to 16 bytes.
    """
    chunksize = 64*1024
    outputFile = "(encrypted)"+filename
    filesize = str(os.path.getsize(filename)).zfill(16)
    # NOTE(review): building the IV as a str of chr() values and writing
    # str objects to a 'wb' file only works on Python 2 — Python 3 would
    # need bytes here.
    IV = ''

    for i in range(16):
        IV += chr(random.randint(0, 0xFF))

    encryptor = AES.new(key, AES.MODE_CBC, IV)

    with open(filename, 'rb') as infile:
        with open(outputFile, 'wb') as outfile:
            outfile.write(filesize)
            outfile.write(IV)

            while True:
                chunk = infile.read(chunksize)

                if len(chunk) == 0:
                    break
                elif len(chunk) % 16 != 0:
                    # Space-pad the last chunk up to the AES block size.
                    chunk += ' ' * (16 - (len(chunk) % 16))

                outfile.write(encryptor.encrypt(chunk))
+
+

+ 19 - 0
target/Hash/biometry_hash_9.py

@@ -0,0 +1,19 @@
def decrypt(key, filename):
    """Decrypt a file produced by encrypt(), restoring the original name.

    Reads the 16-byte size header and the IV, AES-CBC decrypts the rest,
    then truncates the output to the recorded size to drop the padding.
    """
    chunksize = 64*1024
    # Strip the "(encrypted)" prefix (11 characters) for the output name.
    outputFile = filename[11:]

    with open(filename, 'rb') as infile:
        # NOTE(review): long() is Python 2 only — int() on Python 3.
        filesize = long(infile.read(16))
        IV = infile.read(16)

        decryptor = AES.new(key, AES.MODE_CBC, IV)

        with open(outputFile, 'wb') as outfile:
            while True:
                chunk = infile.read(chunksize)

                if len(chunk) == 0:
                    break

                outfile.write(decryptor.decrypt(chunk))
            outfile.truncate(filesize)

+ 25 - 0
target/Hash/crypto_1.py

@@ -0,0 +1,25 @@
def aes(key, data, counter=False):
    """AES-CTR en-/decrypt *data* with *key* via pycryptopp or PyCrypto.

    Args
        key: the encryption key
        data: plain text data
        counter: a callable counter, usually not needed
    """
    # pycryptopp's AES exposes .process(); prefer it when available.
    if hasattr(AES, "process"):
        return AES(key).process(data)
    # PyCrypto path: CTR mode needs a counter callable.
    cipher = AES.new(key, AES.MODE_CTR, counter=counter or Counter())
    remainder = len(data) % 16
    if not remainder:
        return cipher.encrypt(data)
    # Data length must be a multiple of 16: pad with bytes whose value is
    # the padding length, then drop the padding's ciphertext — CTR is a
    # stream cipher, so the prefix is unaffected.
    pad = 16 - remainder
    return cipher.encrypt(data + chr(pad) * pad)[:-pad]
+
+
+    

+ 3 - 0
target/Hash/crypto_3.py

@@ -0,0 +1,3 @@
def digest(self):
    """Return the double-SHA-256 digest truncated to self.truncate_to bytes."""
    inner = self.h.digest()
    return sha256(inner).digest()[:self.truncate_to]
+

+ 2 - 0
target/Hash/crypto_4.py

@@ -0,0 +1,2 @@
def hexdigest(self):
    # Hex form of digest().  NOTE(review): str.encode('hex') is a
    # Python 2 idiom — Python 3 would need .hex() / binascii.hexlify.
    return self.digest().encode('hex')

+ 8 - 0
target/Hash/crypto_5.py

@@ -0,0 +1,8 @@
def set_convergence_secret(self, secret):
    """Set the secret used to defeat the confirmation-of-a-file attack.

    Raises CryptError when a different secret was already set, since
    changing it mid-encryption would break the convergence scheme.
    """
    secret = clean_string(secret)
    changed = self.__convergence_secret and self.__convergence_secret != secret
    if changed:
        raise CryptError("Do not change the convergence secret during encryption!")
    self.__convergence_secret = secret

+ 15 - 0
target/Hash/crypto_6.py

@@ -0,0 +1,15 @@
def encrypt(self, data):
    """Encrypt *data* with convergence encryption.

    Args
        data: str, the plain text to be encrypted

    Returns
        (key, id, ciphertext) where key = hash(block),
        id = hash(hash(block)) and ciphertext = aes(key, block).
    """
    assert(isinstance(data, str))
    enc_key, block_id = self.__sec_key(data)
    ciphertext = aes(enc_key, data)
    return enc_key, block_id, ciphertext
+

+ 25 - 0
target/Hash/crypto_7.py

@@ -0,0 +1,25 @@
def decrypt(self, key, ciphertext, verify=False):
    """Decrypt *ciphertext* with convergence encryption.

    Args
        key: str, encryption key
        ciphertext: str, the encrypted block
        verify: bool, re-hash the plaintext and check it matches *key*

    Returns
        the plain text

    Raises
        CryptError when verification is requested, a convergence secret
        is set, and the recomputed key does not match.
    """
    plain = aes(key, ciphertext)
    if verify:
        h = SHA256d(plain)
        if self.__convergence_secret:
            h.update(self.__convergence_secret)
        # Verification is only possible when the convergence secret is known!
        if self.__convergence_secret and key != h.digest():
            msg = "Block verification error on %s." % SHA256d(key).hexdigest()
            log.error(msg)
            raise CryptError(msg)
    return plain
+
+

+ 17 - 0
target/Hash/crypto_8.py

@@ -0,0 +1,17 @@
def encrypt_key(key, nonce, data):
    """Derive a one-time key from *key* and *nonce*, then AES *data* with it.

    Args
        key: encryption key
        nonce: a string used exactly once (try a time-based UUID)
        data: the data to en-/decrypt
    Returns
        ciphertext: AES256-processed data under the derived one-time key
    """
    hashed_key = SHA256d(clean_string(key)).digest()  # assert 32 bytes key
    nonce_hash = SHA256d(nonce).digest()
    one_time_key = aes(hashed_key, nonce_hash)        # generate encryption key
    return aes(one_time_key, data)                    # encrypt data using the new key

+ 8 - 0
target/Hash/dirist_14.py

@@ -0,0 +1,8 @@
def hasher(key):
    """Map *key* (a str) to a large int via SHA-224; return -1 on failure.

    The original used a bare ``except:``, which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to Exception.
    """
    try:
        digest = hashlib.sha224(key.encode()).hexdigest()
        return int(digest, 16)
    except Exception:
        return -1
+

+ 12 - 0
target/Hash/dirist_15.py

@@ -0,0 +1,12 @@
def encrypt(key, text, dec=1):
    """Shift each character of *text* by successive digits of hasher(key).

    Character i is shifted by dec * (current decimal digit); the digit
    stream restarts from the full hash once exhausted.  Returns the
    encoded string.

    Fix: the original body was wrapped in ``if __name__ == '__main__':``,
    so the function silently returned None whenever the module was
    imported; the guard is removed.
    """
    encrypted = ""
    hashed = hasher(key)
    dupkey = hashed
    for ch in text:
        encrypted += chr(ord(ch) + dec * (dupkey % 10))
        # Refill the digit stream once it runs out.
        if dupkey == 0:
            dupkey = hashed
        dupkey = int(dupkey / 10)
    return encrypted
+

Nem az összes módosított fájl került megjelenítésre, mert túl sok fájl változott