2 Commits bea9142d64 ... 477aca1605

Author SHA1 Message Date
  wendaojidian 477aca1605 Merge remote-tracking branch 'origin/master' 2 years ago
  wendaojidian 31ce09d602 merge sjim 2 years ago
100 files changed with 2635 additions and 0 deletions
  1. 21 0
      target/Azure/AddUp/Azure-blob-storage_4.py
  2. 15 0
      target/Azure/AddUp/Azure-blob-storage_5.py
  3. 29 0
      target/Azure/AddUp/blob-upload-1_1.py
  4. 9 0
      target/Azure/AddUp/blob-upload-2_3.py
  5. 5 0
      target/Azure/AddUp/blob-upload-2_4.py
  6. 13 0
      target/Azure/AddUp/blob-upload-2_5.py
  7. 34 0
      target/Azure/AddUp/blob-upload-2_6.py
  8. 22 0
      target/Azure/AddUp/blob-upload-2_7.py
  9. 9 0
      target/Azure/AddUp/blob-upload-2_9.py
  10. 58 0
      target/Azure/AddUp/blob-upload_1.py
  11. 103 0
      target/Azure/AddUp/circuitbreaker_1.py
  12. 97 0
      target/Azure/AddUp/datafactory_4.py
  13. 28 0
      target/Azure/AddUp/file_advanced_samples_2.py
  14. 20 0
      target/Azure/AddUp/file_advanced_samples_3.py
  15. 22 0
      target/Azure/AddUp/file_advanced_samples_4.py
  16. 65 0
      target/Azure/AddUp/file_advanced_samples_6.py
  17. 22 0
      target/Azure/AddUp/file_basic_samples_2.py
  18. 105 0
      target/Azure/AddUp/file_basic_samples_3.py
  19. 29 0
      target/Azure/AddUp/file_basic_samples_4.py
  20. 40 0
      target/Azure/AddUp/python-quick-start_3.py
  21. 24 0
      target/Azure/AddUp/table_advanced_samples_2.py
  22. 18 0
      target/Azure/AddUp/table_advanced_samples_4.py
  23. 21 0
      target/Azure/AddUp/table_advanced_samples_5.py
  24. 50 0
      target/Azure/AddUp/table_advanced_samples_7.py
  25. 58 0
      target/Azure/AddUp/table_basic_samples_2.py
  26. 32 0
      target/Azure/DLfile_6.py
  27. 12 0
      target/Azure/add_azure_account_1.py
  28. 16 0
      target/Azure/add_azure_account_and_set_role_assignment_1.py
  29. 18 0
      target/Azure/add_azure_account_and_set_role_assignment_2.py
  30. 20 0
      target/Azure/add_azure_account_and_set_role_assignment_3.py
  31. 26 0
      target/Azure/add_azure_account_and_set_role_assignment_4.py
  32. 20 0
      target/Azure/add_azure_account_and_set_role_assignment_5.py
  33. 63 0
      target/Azure/add_azure_account_and_set_role_assignment_6.py
  34. 3 0
      target/Azure/adls_2.py
  35. 4 0
      target/Azure/adls_4.py
  36. 10 0
      target/Azure/azure_clients_1.py
  37. 11 0
      target/Azure/azure_clients_2.py
  38. 11 0
      target/Azure/azure_clients_3.py
  39. 11 0
      target/Azure/azure_clients_4.py
  40. 11 0
      target/Azure/azure_clients_5.py
  41. 11 0
      target/Azure/azure_clients_6.py
  42. 11 0
      target/Azure/azure_clients_7.py
  43. 13 0
      target/Azure/azure_clients_8.py
  44. 11 0
      target/Azure/azure_clients_9.py
  45. 25 0
      target/Azure/azure_rm_14.py
  46. 98 0
      target/Azure/azure_rm_15.py
  47. 59 0
      target/Azure/azure_rm_2.py
  48. 11 0
      target/Azure/azure_rm_9.py
  49. 17 0
      target/Azure/azure_rm_aks_facts_4.py
  50. 32 0
      target/Azure/azure_service_principal_attribute_1.py
  51. 7 0
      target/Azure/azure_storage_11.py
  52. 28 0
      target/Azure/azure_storage_12.py
  53. 15 0
      target/Azure/azure_storage_5.py
  54. 10 0
      target/Azure/azure_storage_8.py
  55. 25 0
      target/Azure/azure_system_helpers_2.py
  56. 9 0
      target/Azure/azure_system_helpers_3.py
  57. 3 0
      target/Azure/azure_system_helpers_4.py
  58. 3 0
      target/Azure/azure_system_helpers_5.py
  59. 3 0
      target/Azure/azure_system_helpers_6.py
  60. 15 0
      target/Azure/azure_system_helpers_7.py
  61. 16 0
      target/Azure/azure_system_helpers_8.py
  62. 22 0
      target/Azure/blob-adapter_2.py
  63. 4 0
      target/Azure/blob-adapter_3.py
  64. 10 0
      target/Azure/blob-adapter_4.py
  65. 17 0
      target/Azure/blob-permission_3.py
  66. 46 0
      target/Azure/blob-upload-1_3.py
  67. 8 0
      target/Azure/blob-upload-1_4.py
  68. 12 0
      target/Azure/blob-upload-2_4.py
  69. 18 0
      target/Azure/blob-upload-3_3.py
  70. 5 0
      target/Azure/blob-upload-4_1.py
  71. 9 0
      target/Azure/blob-upload-4_2.py
  72. 6 0
      target/Azure/blob-upload-4_3.py
  73. 10 0
      target/Azure/blob-upload-4_4.py
  74. 58 0
      target/Azure/blob-upload_1.py
  75. 40 0
      target/Azure/classAzureProvider_2.py
  76. 30 0
      target/Azure/classAzureProvider_3.py
  77. 81 0
      target/Azure/classAzureProvider_4.py
  78. 45 0
      target/Azure/classAzureProvider_6.py
  79. 23 0
      target/Azure/client_40.py
  80. 23 0
      target/Azure/client_41.py
  81. 2 0
      target/Azure/client_48.py
  82. 13 0
      target/Azure/container_volume_5.py
  83. 14 0
      target/Azure/data_lake_4.py
  84. 13 0
      target/Azure/django-blob_5.py
  85. 34 0
      target/Azure/django-blob_6.py
  86. 22 0
      target/Azure/django-blob_7.py
  87. 12 0
      target/Azure/reproduce-14067_1.py
  88. 48 0
      target/Azure/submit_azureml_pytest_1.py
  89. 40 0
      target/Azure/submit_azureml_pytest_2.py
  90. 35 0
      target/Azure/submit_azureml_pytest_3.py
  91. 16 0
      target/Azure/submit_azureml_pytest_4.py
  92. 48 0
      target/Azure/submit_azureml_pytest_5.py
  93. 96 0
      target/Azure/submit_azureml_pytest_6.py
  94. 61 0
      target/Azure/table-service_2.py
  95. 19 0
      target/Azure/table-service_3.py
  96. 24 0
      target/Azure/table-storage_2.py
  97. 14 0
      target/Azure/test_adx_2.py
  98. 14 0
      target/Azure/test_adx_3.py
  99. 13 0
      target/Azure/test_adx_4.py
  100. 23 0
      target/Azure/test_adx_5.py

+ 21 - 0
target/Azure/AddUp/Azure-blob-storage_4.py

@@ -0,0 +1,21 @@
+def create_blob_from_url(storage_connection_string,container_name):
+    try:
+        # urls to fetch into blob storage
+        url_list = get_random_images()
+
+        # Instantiate a new BlobServiceClient and a new ContainerClient
+        blob_service_client = BlobServiceClient.from_connection_string(storage_connection_string)
+        container_client = blob_service_client.get_container_client(container_name)
+
+        for u in url_list:
+            # Download file from url then upload blob file
+            r = requests.get(u, stream = True)
+            if r.status_code == 200:
+                r.raw.decode_content = True
+                blob_client = container_client.get_blob_client(get_filename_from_url(u))
+                blob_client.upload_blob(r.raw,overwrite=True)
+        return True
+        
+    except Exception as e:
+        print(e, e.args)
+        return False
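
create_blob_from_url assumes the v12 azure-storage-blob SDK plus the requests library and two helpers (get_random_images, get_filename_from_url) that are defined elsewhere in the sample. A minimal sketch of that assumed scaffolding, with placeholder URLs:

import requests
from azure.storage.blob import BlobServiceClient

def get_random_images():
    # Placeholder helper: return a few image URLs to fetch; the real helper is not shown in this commit.
    return ["https://example.com/images/cat.png", "https://example.com/images/dog.png"]

def get_filename_from_url(url):
    # Derive a blob name from the last path segment of the URL.
    return url.rsplit("/", 1)[-1]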

+ 15 - 0
target/Azure/AddUp/Azure-blob-storage_5.py

@@ -0,0 +1,15 @@
+def create_blob_from_path(storage_connection_string,container_name):
+    try:
+        # Instantiate a new BlobServiceClient and a new ContainerClient
+        blob_service_client = BlobServiceClient.from_connection_string(storage_connection_string)
+        container_client = blob_service_client.get_container_client(container_name)
+
+        for f in list_files():
+            with open(f["local_path"], "rb") as data:
+                blob_client = container_client.get_blob_client(f["file_name"])
+                blob_client.upload_blob(data,overwrite=True)
+        return True
+
+    except Exception as e:
+        print(e, e.args)
+        return False

+ 29 - 0
target/Azure/AddUp/blob-upload-1_1.py

@@ -0,0 +1,29 @@
+def upload_file():
+    if request.method == 'POST':
+        file = request.files['file']
+        filename = secure_filename(file.filename)
+        fileextension = filename.rsplit('.',1)[1]
+        Randomfilename = id_generator()
+        filename = Randomfilename + '.' + fileextension
+        try:
+            blob_service.create_blob_from_stream(container, filename, file)
+        except Exception as e:
+            print('Exception=' + str(e))
+            pass
+        ref =  'http://'+ account + '.blob.core.windows.net/' + container + '/' + filename
+        return '''
+	    <!doctype html>
+	    <title>File Link</title>
+	    <h1>Uploaded File Link</h1>
+	    <p>''' + ref + '''</p>
+	    <img src="'''+ ref +'''">
+	    '''
+    return '''
+    <!doctype html>
+    <title>Upload new File</title>
+    <h1>Upload new File</h1>
+    <form action="" method=post enctype=multipart/form-data>
+      <p><input type=file name=file>
+         <input type=submit value=Upload>
+    </form>
+    '''

+ 9 - 0
target/Azure/AddUp/blob-upload-2_3.py

@@ -0,0 +1,9 @@
+def _get_service(self):
+        if not hasattr(self, '_blob_service'):
+            self._blob_service = BlobService(
+                account_name=self.account_name,
+                account_key=self.account_key,
+                protocol='https' if self.use_ssl else 'http'
+            )
+
+        return self._blob_service
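
The blob-upload-2_*.py fragments read like methods of a Django storage backend built on the legacy (pre-v12) BlobService API; the attributes and helpers they reference are not part of this diff. A rough sketch of the assumed class context, with placeholder settings:

import mimetypes
from azure.storage.blob import BlobService  # legacy azure-storage package; the exact import path varies by version
from django.core.files.storage import Storage

class AzureStorage(Storage):
    # Hypothetical settings assumed by _get_service and the methods below.
    account_name = 'myaccount'
    account_key = 'mykey'
    container = 'media'
    use_ssl = True

    def get_cache_control(self, container, name, content_type):
        # Assumed helper referenced by _save; returning None skips Cache-Control headers.
        return None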

+ 5 - 0
target/Azure/AddUp/blob-upload-2_4.py

@@ -0,0 +1,5 @@
+def _get_properties(self, name):
+        return self._get_service().get_blob_properties(
+            container_name=self.container,
+            blob_name=name
+        )

+ 13 - 0
target/Azure/AddUp/blob-upload-2_5.py

@@ -0,0 +1,13 @@
+def _open(self, name, mode='rb'):
+        """
+        Return the AzureStorageFile.
+        """
+
+        from django.core.files.base import ContentFile
+
+        contents = self._get_service().get_blob_to_bytes(
+            container_name=self.container,
+            blob_name=name
+        )
+
+        return ContentFile(contents)

+ 34 - 0
target/Azure/AddUp/blob-upload-2_6.py

@@ -0,0 +1,34 @@
+def _save(self, name, content):
+        """
+        Use the Azure Storage service to write ``content`` to a remote file
+        (called ``name``).
+        """
+        
+
+        content.open()
+
+        content_type = None
+
+        if hasattr(content.file, 'content_type'):
+            content_type = content.file.content_type
+        else:
+            content_type = mimetypes.guess_type(name)[0]
+
+        cache_control = self.get_cache_control(
+            self.container,
+            name,
+            content_type
+        )
+
+        self._get_service().put_block_blob_from_file(
+            container_name=self.container,
+            blob_name=name,
+            stream=content,
+            x_ms_blob_content_type=content_type,
+            cache_control=cache_control,
+            x_ms_blob_cache_control=cache_control
+        )
+
+        content.close()
+
+        return name

+ 22 - 0
target/Azure/AddUp/blob-upload-2_7.py

@@ -0,0 +1,22 @@
+def listdir(self, path):
+        """
+        Lists the contents of the specified path, returning a 2-tuple of lists;
+        the first item being directories, the second item being files.
+        """
+
+        files = []
+
+        if path and not path.endswith('/'):
+            path = '%s/' % path
+
+        path_len = len(path)
+
+        if not path:
+            path = None
+
+        blob_list = self._get_service().list_blobs(self.container, prefix=path)
+
+        for name in blob_list:
+            files.append(name[path_len:])
+
+        return ([], files)

+ 9 - 0
target/Azure/AddUp/blob-upload-2_9.py

@@ -0,0 +1,9 @@
+def delete(self, name):
+        """
+        Deletes the file referenced by name.
+        """
+
+        try:
+            self._get_service().delete_blob(self.container, name)
+        except AzureMissingResourceHttpError:
+            pass

+ 58 - 0
target/Azure/AddUp/blob-upload_1.py

@@ -0,0 +1,58 @@
+def run_sample():
+    try:
+        # Create the BlockBlobService that is used to call the Blob service for the storage account
+        blob_service_client = BlockBlobService(
+            account_name='accountname', account_key='accountkey')
+
+        # Create a container called 'quickstartblobs'.
+        container_name = 'quickstartblobs'
+        blob_service_client.create_container(container_name)
+
+        # Set the permission so the blobs are public.
+        blob_service_client.set_container_acl(
+            container_name, public_access=PublicAccess.Container)
+
+        # Create the Sample folder if it does not exist, and create a file in it to test the upload and download.
+        local_path = os.path.expanduser("~/Sample")
+        if not os.path.exists(local_path):
+            os.makedirs(os.path.expanduser("~/Sample"))
+        local_file_name = "QuickStart_" + str(uuid.uuid4()) + ".txt"
+        full_path_to_file = os.path.join(local_path, local_file_name)
+
+        # Write text to the file.
+        file = open(full_path_to_file,  'w')
+        file.write("Hello, World!")
+        file.close()
+
+        print("Temp file = " + full_path_to_file)
+        print("\nUploading to Blob storage as blob" + local_file_name)
+
+        # Upload the created file, use local_file_name for the blob name
+        blob_service_client.create_blob_from_path(
+            container_name, local_file_name, full_path_to_file)
+
+        # List the blobs in the container
+        print("\nList blobs in the container")
+        generator = blob_service_client.list_blobs(container_name)
+        for blob in generator:
+            print("\t Blob name: " + blob.name)
+
+        # Download the blob(s).
+        # Add '_DOWNLOADED' as a suffix to the '.txt' file name so you can see both files in Documents.
+        full_path_to_file2 = os.path.join(local_path, str.replace(
+            local_file_name, '.txt', '_DOWNLOADED.txt'))
+        print("\nDownloading blob to " + full_path_to_file2)
+        blob_service_client.get_blob_to_path(
+            container_name, local_file_name, full_path_to_file2)
+
+        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
+                         "application will exit.")
+        sys.stdout.flush()
+        input()
+
+        # Clean up resources. This includes the container and the temp files
+        blob_service_client.delete_container(container_name)
+        os.remove(full_path_to_file)
+        os.remove(full_path_to_file2)
+    except Exception as e:
+        print(e)
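
run_sample() targets the legacy azure-storage-blob 2.x API (BlockBlobService); a minimal sketch of the imports it assumes and how it would be invoked:

import os
import sys
import uuid
from azure.storage.blob import BlockBlobService, PublicAccess

if __name__ == '__main__':
    run_sample()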

+ 103 - 0
target/Azure/AddUp/circuitbreaker_1.py

@@ -0,0 +1,103 @@
+def run_circuit_breaker():
+    # Name of image to use for testing.
+    image_to_upload = "HelloWorld.png"
+
+    global blob_client
+    global container_name
+    try:
+
+        # Create a reference to the blob client and container using the storage account name and key
+        blob_client = BlockBlobService(account_name, account_key)
+
+        # Make the container unique by using a UUID in the name.
+        container_name = "democontainer" + str(uuid.uuid4())
+        blob_client.create_container(container_name)
+
+    except Exception as ex:
+        print("Please make sure you have put the correct storage account name and key.")
+        print(ex)
+
+    # Define a reference to the actual blob and upload the block_blob to the newly created container
+    full_path_to_file = os.path.join(os.path.dirname(__file__), image_to_upload)
+    blob_client.create_blob_from_path(container_name, image_to_upload, full_path_to_file)
+
+    # Set the location mode to secondary, so you can check just the secondary data center.
+    blob_client.location_mode = LocationMode.SECONDARY
+    blob_client.retry = LinearRetry(backoff=0).retry
+
+    # Before proceeding, wait until the blob has been replicated to the secondary data center.
+    # Loop and check for the presence of the blob once in a second until it hits 60 seconds
+    # or until it finds it
+    counter = 0
+    while counter < 60:
+        counter += 1
+        sys.stdout.write("\nAttempt {0} to see if the blob has replicated to the secondary storage yet.".format(counter))
+        sys.stdout.flush()
+        if blob_client.exists(container_name, image_to_upload):
+            break
+
+        # Wait a second, then loop around and try again
+        # When it's finished replicating to the secondary, continue.
+        time.sleep(1)
+
+    # Set the starting LocationMode to Primary, then Secondary.
+    # Here we use the linear retry by default, but allow it to retry to secondary if
+    # the initial request to primary fails.
+    # Note that the default is Primary. You must have RA-GRS enabled to use this
+    blob_client.location_mode = LocationMode.PRIMARY
+    blob_client.retry = LinearRetry(max_attempts=retry_threshold, backoff=1).retry
+
+    ''' 
+        ************INSTRUCTIONS**************
+        To perform the test, first replace the 'accountname' and 'accountkey' with your storage account name and key.
+        Every time it calls get_blob_to_path it will hit the response_callback function.
+
+        Next, run this app. While this loop is running, pause the program by pressing any key, and
+        put the intercept code in Fiddler (that will intercept and return a 503).
+
+        For instructions on modifying Fiddler, look at the Fiddler_script.text file in this project.
+        There are also full instructions in the ReadMe_Instructions.txt file included in this project.
+
+        After adding the custom script to Fiddler, calls to primary storage will fail with a retryable
+        error which will trigger the Retrying event (above).
+        Then it will switch over and read the secondary. It will do that 20 times, then try to
+        switch back to the primary.
+        After seeing that happen, pause this again and remove the intercepting Fiddler code
+        Then you'll see it return to the primary and finish.
+        '''
+
+    print("\n\nThe application will pause at 200 unit interval")
+
+    for i in range(0, 1000):
+        if blob_client.location_mode == LocationMode.SECONDARY:
+            sys.stdout.write("S{0} ".format(str(i)))
+        else:
+            sys.stdout.write("P{0} ".format(str(i)))
+        sys.stdout.flush()
+
+        try:
+
+            # This function is called immediately after retry evaluation is performed.
+            # It is used to trigger the change from primary to secondary and back
+            blob_client.retry_callback = retry_callback
+
+            # Download the file
+            blob_client.get_blob_to_path(container_name, image_to_upload,
+                                                str.replace(full_path_to_file, ".png", "Copy.png"))
+
+            # Set the application to pause at 200 unit intervals to implement simulated failures
+            if i == 200 or i == 400 or i == 600 or i == 800:
+                sys.stdout.write("\nPress the Enter key to resume")
+                sys.stdout.flush()
+                if sys.version_info[0] < 3:
+                    raw_input()
+                else:
+                    input()
+        except Exception as ex:
+            print(ex)
+        finally:
+            # Force an exists call to succeed by resetting the status
+            blob_client.response_callback = response_callback
+
+    # Clean up resources
+    blob_client.delete_container(container_name)

+ 97 - 0
target/Azure/AddUp/datafactory_4.py

@@ -0,0 +1,97 @@
+def main():
+
+    # Azure subscription ID
+    subscription_id = '<Azure subscription ID>'
+
+    # This program creates this resource group. If it's an existing resource group, comment out the code that creates the resource group
+    rg_name = '<Azure resource group name>'
+
+    # The data factory name. It must be globally unique.
+    df_name = '<Data factory name>'        
+
+    # Specify your Active Directory client ID, client secret, and tenant ID
+    credentials = ServicePrincipalCredentials(client_id='<AAD application ID>', secret='<AAD app authentication key>', tenant='<AAD tenant ID>')
+    resource_client = ResourceManagementClient(credentials, subscription_id)
+    adf_client = DataFactoryManagementClient(credentials, subscription_id)
+
+    rg_params = {'location':'eastus'}
+    df_params = {'location':'eastus'}
+
+    # create the resource group
+    # comment out if the resource group already exists
+    resource_client.resource_groups.create_or_update(rg_name, rg_params)
+
+    # Create a data factory
+    df_resource = Factory(location='eastus')
+    df = adf_client.factories.create_or_update(rg_name, df_name, df_resource)
+    print_item(df)
+    while df.provisioning_state != 'Succeeded':
+        df = adf_client.factories.get(rg_name, df_name)
+        time.sleep(1)
+
+    # Create an Azure Storage linked service
+    ls_name = 'storageLinkedService'
+
+    # Specify the name and key of your Azure Storage account
+    storage_string = SecureString('DefaultEndpointsProtocol=https;AccountName=<Azure storage account>;AccountKey=<Azure storage authentication key>')
+
+    ls_azure_storage = AzureStorageLinkedService(connection_string=storage_string)
+    ls = adf_client.linked_services.create_or_update(rg_name, df_name, ls_name, ls_azure_storage)
+    print_item(ls)
+
+    # Create an Azure blob dataset (input)
+    ds_name = 'ds_in'
+    ds_ls = LinkedServiceReference(ls_name)
+    blob_path= 'adftutorial/inputpy'
+    blob_filename = 'input.txt'
+    ds_azure_blob= AzureBlobDataset(ds_ls, folder_path=blob_path, file_name = blob_filename)
+    ds = adf_client.datasets.create_or_update(rg_name, df_name, ds_name, ds_azure_blob)
+    print_item(ds)
+
+    # Create an Azure blob dataset (output)
+    dsOut_name = 'ds_out'
+    output_blobpath = 'adftutorial/outputpy'
+    dsOut_azure_blob = AzureBlobDataset(ds_ls, folder_path=output_blobpath)
+    dsOut = adf_client.datasets.create_or_update(rg_name, df_name, dsOut_name, dsOut_azure_blob)
+    print_item(dsOut)
+
+    # Create a copy activity
+    act_name =  'copyBlobtoBlob'
+    blob_source = BlobSource()
+    blob_sink = BlobSink()
+    dsin_ref = DatasetReference(ds_name)
+    dsOut_ref = DatasetReference(dsOut_name)
+    copy_activity = CopyActivity(act_name,inputs=[dsin_ref], outputs=[dsOut_ref], source=blob_source, sink=blob_sink)
+
+    # Create a pipeline with the copy activity
+    p_name =  'copyPipeline'
+    params_for_pipeline = {}
+    p_obj = PipelineResource(activities=[copy_activity], parameters=params_for_pipeline)
+    p = adf_client.pipelines.create_or_update(rg_name, df_name, p_name, p_obj)
+    print_item(p)
+
+    # Create a pipeline run
+    run_response = adf_client.pipelines.create_run(rg_name, df_name, p_name,
+        {
+        }
+    )
+
+    # Monitor the pipeline run
+    time.sleep(30)
+    pipeline_run = adf_client.pipeline_runs.get(rg_name, df_name, run_response.run_id)
+    print("\n\tPipeline run status: {}".format(pipeline_run.status))
+    activity_runs_paged = list(adf_client.activity_runs.list_by_pipeline_run(rg_name, df_name, pipeline_run.run_id, datetime.now() - timedelta(1),  datetime.now() + timedelta(1)))
+    print_activity_run_details(activity_runs_paged[0])
+
+    # Create a trigger
+    tr_name = 'mytrigger'
+    scheduler_recurrence = ScheduleTriggerRecurrence(frequency='Minute', interval='15',start_time=datetime.now(), end_time=datetime.now() + timedelta(1), time_zone='UTC') 
+    pipeline_parameters = {'inputPath':'adftutorial/inputpy', 'outputPath':'adftutorial/outputpy'}
+    pipelines_to_run = []
+    pipeline_reference = PipelineReference('copyPipeline')
+    pipelines_to_run.append(TriggerPipelineReference(pipeline_reference, pipeline_parameters))
+    tr_properties = ScheduleTrigger(description='My scheduler trigger', pipelines = pipelines_to_run, recurrence=scheduler_recurrence)    
+    adf_client.triggers.create_or_update(rg_name, df_name, tr_name, tr_properties)
+
+    # start the trigger
+    adf_client.triggers.start(rg_name, df_name, tr_name)
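
main() relies on print_item and print_activity_run_details, helpers from the Data Factory Python quickstart that this hunk does not include, and on several azure-mgmt-datafactory imports. A hedged sketch of what that scaffolding might look like:

import time
from datetime import datetime, timedelta
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import *

def print_item(group):
    # Print the basic properties of an ADF resource (factory, linked service, dataset, pipeline).
    print("\tName: {}".format(group.name))
    print("\tId: {}".format(group.id))
    if hasattr(group, 'location'):
        print("\tLocation: {}".format(group.location))

def print_activity_run_details(activity_run):
    # Print the outcome of a single activity run.
    print("\n\tActivity run status: {}".format(activity_run.status))
    if activity_run.status == 'Succeeded':
        print("\tBytes read: {}".format(activity_run.output['dataRead']))
        print("\tBytes written: {}".format(activity_run.output['dataWritten']))
    else:
        print("\tErrors: {}".format(activity_run.error['message']))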

+ 28 - 0
target/Azure/AddUp/file_advanced_samples_2.py

@@ -0,0 +1,28 @@
+def run_all_samples(self, connection_string):
+        print('Azure Storage File Advanced samples - Starting.')
+        
+        try:
+            # Create an instance of ShareServiceClient
+            service = ShareServiceClient.from_connection_string(conn_str=connection_string)
+
+            # List shares
+            print('\n\n* List shares *\n')
+            self.list_shares(service)
+
+            # Set Cors
+            print('\n\n* Set cors rules *\n')
+            self.set_cors_rules(service)
+
+            # Set Service Properties
+            print('\n\n* Set service properties *\n')
+            self.set_service_properties(service)
+
+            # Share, directory and file properties and metadata
+            print('\n\n* Metadata and properties *\n')
+            self.metadata_and_properties(service)
+
+        except Exception as e:
+            print('Error occurred in the sample.', e) 
+
+        finally:
+            print('\nAzure Storage File Advanced samples - Completed.\n')
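
These file-share samples build on the v12 azure-storage-file-share package; self.random_data and the connection string come from the sample's own helpers, which are not in this diff. The imports assumed by run_all_samples and the methods that follow (a sketch):

from azure.storage.fileshare import ShareServiceClient, CorsRule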

+ 20 - 0
target/Azure/AddUp/file_advanced_samples_3.py

@@ -0,0 +1,20 @@
+def list_shares(self, service):
+        share_prefix = 'sharesample' + self.random_data.get_random_name(6)
+
+        try:        
+            print('1. Create multiple shares with prefix: ', share_prefix)
+            for i in range(5):
+                service.create_share(share_name=share_prefix + str(i))
+            
+            print('2. List shares')
+            shares = service.list_shares()
+            for share in shares:
+                print('  Share name:' + share.name)
+
+        except Exception as e:
+            print(e) 
+
+        finally:
+            print('3. Delete shares with prefix:' + share_prefix) 
+            for i in range(5):
+                service.delete_share(share_prefix + str(i))

+ 22 - 0
target/Azure/AddUp/file_advanced_samples_4.py

@@ -0,0 +1,22 @@
+def set_cors_rules(self, service):
+        print('1. Get Cors Rules')
+        original_cors_rules = service.get_service_properties()['cors']
+
+        print('2. Overwrite Cors Rules')
+        cors_rule = CorsRule(
+            allowed_origins=['*'], 
+            allowed_methods=['POST', 'GET'],
+            allowed_headers=['*'],
+            exposed_headers=['*'],
+            max_age_in_seconds=3600)
+
+        try:
+            service.set_service_properties(cors=[cors_rule])
+        except Exception as e:
+            print(e)
+        finally:
+            #reverting cors rules back to the original ones
+            print('3. Revert Cors Rules back to the original ones')
+            service.set_service_properties(cors=original_cors_rules)
+        
+        print("CORS sample completed")

+ 65 - 0
target/Azure/AddUp/file_advanced_samples_6.py

@@ -0,0 +1,65 @@
+def metadata_and_properties(self, service):
+        share_name = 'sharename' + self.random_data.get_random_name(6)
+
+        try:
+            # All directories and share must be created in a parent share.
+            # Max capacity: 5TB per share
+            print('1. Create sample share with name ' + share_name)
+            quota = 1 # in GB
+            metadata = { "foo": "bar", "baz": "foo" }
+            share_client = service.create_share(share_name=share_name, metadata=metadata, quota=quota)
+            print('Sample share "'+ share_name +'" created.')
+
+            print('2. Get share properties.')
+            properties = share_client.get_share_properties()
+
+            print('3. Get share metadata.')
+            get_metadata = properties['metadata']
+            for k, v in get_metadata.items():
+                print("\t" + k + ": " + v)
+
+            dir_name = 'dirname' + self.random_data.get_random_name(6)
+
+            print('4. Create sample directory with name ' + dir_name)
+            metadata = { "abc": "def", "jkl": "mno" }
+            directory_client = share_client.create_directory(dir_name, metadata=metadata)
+            print('Sample directory "'+ dir_name +'" created.')
+
+            print('5. Get directory properties.')
+            properties = directory_client.get_directory_properties()
+            
+            print('6. Get directory metadata.')
+            get_metadata = properties['metadata']
+            for k, v in get_metadata.items():
+                print("\t" + k + ": " + v)
+
+            file_name = 'sample.txt'
+            # Uploading text to share_name/dir_name/sample.txt in Azure Files account.
+            # Max capacity: 1TB per file
+            print('7. Upload sample file from text to directory.')
+            metadata = { "prop1": "val1", "prop2": "val2" }
+            file_client = directory_client.get_file_client(file_name)
+            file_client.upload_file('Hello World! - from text sample', metadata=metadata)
+            print('Sample file "' + file_name + '" created and uploaded to: ' + share_name + '/' + dir_name)        
+
+            print('8. Get file properties.')
+            properties = file_client.get_file_properties()
+
+            print('9. Get file metadata.')
+            get_metadata = properties['metadata']
+            for k, v in get_metadata.items():
+                print("\t" + k + ": " + v)
+
+            # This is for demo purposes, all files will be deleted when share is deleted
+            print('10. Delete file.')
+            file_client.delete_file()
+
+            # This is for demo purposes, all directories will be deleted when share is deleted
+            print('11. Delete directory.')
+            directory_client.delete_directory()
+
+        finally:
+            print('12. Delete share.')
+            share_client.delete_share(share_name)
+
+        print("Metadata and properties sample completed")

+ 22 - 0
target/Azure/AddUp/file_basic_samples_2.py

@@ -0,0 +1,22 @@
+def run_all_samples(self, connection_string):
+        print('Azure Storage File Basic samples - Starting.')
+        
+        #declare variables
+        filename = 'filesample' + self.random_data.get_random_name(6)
+        sharename = 'sharesample' + self.random_data.get_random_name(6)
+        
+        try:
+            # Create an instance of ShareServiceClient
+            service = ShareServiceClient.from_connection_string(conn_str=connection_string)
+
+            print('\n\n* Basic file operations *\n')
+            self.basic_file_operations(sharename, filename, service)
+
+        except Exception as e:
+            print('error: ' + str(e))
+
+        finally:
+            # Delete all Azure Files created in this sample
+            self.file_delete_samples(sharename, filename, service)
+
+        print('\nAzure Storage File Basic samples - Completed.\n')

+ 105 - 0
target/Azure/AddUp/file_basic_samples_3.py

@@ -0,0 +1,105 @@
+def basic_file_operations(self, sharename, filename, service):
+        # Creating an SMB file share in your Azure Files account.
+        print('\nAttempting to create a sample file from text for upload demonstration.')   
+        # All directories and share must be created in a parent share.
+        # Max capacity: 5TB per share
+
+        print('Creating sample share.')
+        share_client = service.create_share(share_name=sharename)
+        print('Sample share "'+ sharename +'" created.')
+
+
+        # Creating an optional file directory in your Azure Files account.
+        print('Creating a sample directory.')    
+        # Get the directory client
+        directory_client = share_client.create_directory("mydirectory")
+        print('Sample directory "mydirectory" created.')
+
+
+        # Uploading text to sharename/mydirectory/my_text_file in Azure Files account.
+        # Max capacity: 1TB per file
+        print('Uploading a sample file from text.')   
+        # create_file_client
+        file_client = directory_client.get_file_client(filename)
+        # Upload a file
+        file_client.upload_file('Hello World! - from text sample')
+        print('Sample file "' + filename + '" created and uploaded to: ' + sharename + '/mydirectory')
+  
+
+        # Demonstrate how to copy a file
+        print('\nCopying file ' + filename)
+        # Create another file client which will copy the file from url
+        destination_file_client = share_client.get_file_client('file1copy')
+
+        # Copy the sample source file from the url to the destination file
+        copy_resp = destination_file_client.start_copy_from_url(source_url=file_client.url)
+        if copy_resp['copy_status'] == 'pending':
+            # Demonstrate how to abort a copy operation (just for demo, probably will never get here)
+            print('Abort copy operation')
+            destination_file_client.abort_copy()
+        else:
+            print('Copy was a ' + copy_resp['copy_status'])
+        
+
+        # Demonstrate how to create a share and upload a file from a local temporary file path
+        print('\nAttempting to upload a sample file from path for upload demonstration.')  
+        # Creating a temporary file to upload to Azure Files
+        print('Creating a temporary file from text.') 
+        with tempfile.NamedTemporaryFile(delete=False) as my_temp_file:
+            my_temp_file.file.write(b"Hello world!")
+        print('Sample temporary file created.') 
+
+        # Uploading my_temp_file to sharename folder in Azure Files
+        # Max capacity: 1TB per file
+        print('Uploading a sample file from local path.')
+        # Create file_client
+        file_client = share_client.get_file_client(filename)
+
+        # Upload a file
+        with open(my_temp_file.name, "rb") as source_file:
+            file_client.upload_file(source_file)
+
+        print('Sample file "' + filename + '" uploaded from path to share: ' + sharename)
+
+        # Close the temp file
+        my_temp_file.close()
+
+        # Get the list of valid ranges and write to the specified range
+        print('\nGet list of valid ranges of the file.') 
+        file_ranges = file_client.get_ranges()
+
+        data = b'abcdefghijkl'
+        print('Put a range of data to the file.')
+        
+        file_client.upload_range(data=data, offset=file_ranges[0]['start'], length=len(data))
+
+
+        # Demonstrate how to download a file from Azure Files
+        # The following example downloads the file that was previously uploaded to Azure Files
+        print('\nAttempting to download a sample file from Azure files for demonstration.')
+
+        destination_file = os.path.join(tempfile.gettempdir(), 'mypathfile.txt')
+
+        with open(destination_file, "wb") as file_handle:
+            data = file_client.download_file()
+            data.readinto(file_handle)
+
+        print('Sample file downloaded to: ' + destination_file)
+
+
+        # Demonstrate how to list files and directories contains under Azure File share
+        print('\nAttempting to list files and directories under share "' + sharename + '":')
+
+        # Create a generator to list directories and files under share
+        # This is not a recursive listing operation
+        generator = share_client.list_directories_and_files()
+
+        # Prints the directories and files under the share
+        for file_or_dir in generator:
+            print(file_or_dir['name'])
+        
+        # remove temp file
+        os.remove(my_temp_file.name)
+
+        print('Files and directories under share "' + sharename + '" listed.')
+        print('\nCompleted successfully - Azure basic Files operations.')

+ 29 - 0
target/Azure/AddUp/file_basic_samples_4.py

@@ -0,0 +1,29 @@
+def file_delete_samples(self, sharename, filename, service):
+        print('\nDeleting all samples created for this demonstration.')
+
+        try:
+            # Deleting file: 'sharename/mydirectory/filename'
+            # This is for demo purposes only, it's unnecessary, as we're deleting the share later
+            print('Deleting a sample file.')
+
+            share_client = service.get_share_client(sharename)
+            directory_client = share_client.get_directory_client('mydirectory')
+            
+            directory_client.delete_file(file_name=filename)
+            print('Sample file "' + filename + '" deleted from: ' + sharename + '/mydirectory' )
+
+            # Deleting directory: 'sharename/mydirectory'
+            print('Deleting sample directory and all files and directories under it.')
+            share_client.delete_directory('mydirectory')
+            print('Sample directory "/mydirectory" deleted from: ' + sharename)
+
+            # Deleting share: 'sharename'
+            print('Deleting sample share ' + sharename + ' and all files and directories under it.')
+            share_client.delete_share(sharename)
+            print('Sample share "' + sharename + '" deleted.')
+
+            print('\nCompleted successfully - Azure Files samples deleted.')
+
+        except Exception as e:
+            print('********ErrorDelete***********')
+            print(e)

+ 40 - 0
target/Azure/AddUp/python-quick-start_3.py

@@ -0,0 +1,40 @@
+def upload_file_to_container(blob_storage_service_client: BlobServiceClient,
+                             container_name: str, file_path: str) -> batchmodels.ResourceFile:
+    """
+    Uploads a local file to an Azure Blob storage container.
+
+    :param blob_storage_service_client: A blob service client.
+    :param str container_name: The name of the Azure Blob storage container.
+    :param str file_path: The local path to the file.
+    :return: A ResourceFile initialized with a SAS URL appropriate for Batch
+    tasks.
+    """
+    blob_name = os.path.basename(file_path)
+    blob_client = blob_storage_service_client.get_blob_client(container_name, blob_name)
+
+    print(f'Uploading file {file_path} to container [{container_name}]...')
+
+    with open(file_path, "rb") as data:
+        blob_client.upload_blob(data, overwrite=True)
+
+    sas_token = generate_blob_sas(
+        config.STORAGE_ACCOUNT_NAME,
+        container_name,
+        blob_name,
+        account_key=config.STORAGE_ACCOUNT_KEY,
+        permission=BlobSasPermissions(read=True),
+        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2)
+    )
+
+    sas_url = generate_sas_url(
+        config.STORAGE_ACCOUNT_NAME,
+        config.STORAGE_ACCOUNT_DOMAIN,
+        container_name,
+        blob_name,
+        sas_token
+    )
+
+    return batchmodels.ResourceFile(
+        http_url=sas_url,
+        file_path=blob_name
+    )
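
generate_sas_url and the config module are assumed to be defined elsewhere in the Batch sample. A hedged sketch of how the helper might assemble the URL:

def generate_sas_url(account_name, account_domain, container_name, blob_name, sas_token):
    # Compose the full blob URL with the SAS token appended as the query string.
    return f"https://{account_name}.{account_domain}/{container_name}/{blob_name}?{sas_token}"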

+ 24 - 0
target/Azure/AddUp/table_advanced_samples_2.py

@@ -0,0 +1,24 @@
+def run_all_samples(self, account):
+        table_service = account.create_table_service()
+        print('Azure Storage Advanced Table samples - Starting.')
+        
+        print('\n\n* List tables *\n')
+        self.list_tables(table_service)
+        
+        if not account.is_azure_cosmosdb_table():
+           print('\n\n* Set service properties *\n')
+           self.set_service_properties(table_service)
+        
+           print('\n\n* Set Cors rules *\n')
+           self.set_cors_rules(table_service)
+        
+           print('\n\n* ACL operations *\n')
+           self.table_acl_operations(table_service)
+        
+        if (config.IS_EMULATED):
+            print('\n\n* Shared Access Signature is not supported in emulator *\n')
+        else:
+            print('\n\n* SAS operations *\n')
+            self.table_operations_with_sas(account)
+
+        print('\nAzure Storage Advanced Table samples - Completed.\n')

+ 18 - 0
target/Azure/AddUp/table_advanced_samples_4.py

@@ -0,0 +1,18 @@
+def set_service_properties(self, table_service):
+        print('1. Get Table service properties')
+        props = table_service.get_table_service_properties()
+
+        retention = RetentionPolicy(enabled=True, days=5)
+        logging = Logging(delete=True, read=False, write=True, retention_policy=retention)
+        hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=retention)
+        minute_metrics = Metrics(enabled=False)
+
+        try:
+            print('2. Overwrite Table service properties')
+            table_service.set_table_service_properties(logging=logging, hour_metrics=hour_metrics, minute_metrics=minute_metrics)
+
+        finally:
+            print('3. Revert Table service properties back to the original ones')
+            table_service.set_table_service_properties(logging=props.logging, hour_metrics=props.hour_metrics, minute_metrics=props.minute_metrics)
+
+        print('4. Set Table service properties completed')

+ 21 - 0
target/Azure/AddUp/table_advanced_samples_5.py

@@ -0,0 +1,21 @@
+def set_cors_rules(self, table_service):
+        cors_rule = CorsRule(
+            allowed_origins=['*'], 
+            allowed_methods=['POST', 'GET'],
+            allowed_headers=['*'],
+            exposed_headers=['*'],
+            max_age_in_seconds=3600)
+        
+        print('1. Get Cors Rules')
+        original_cors_rules = table_service.get_table_service_properties().cors
+
+        try:        
+            print('2. Overwrite Cors Rules')
+            table_service.set_table_service_properties(cors=[cors_rule])
+
+        finally:
+            #reverting cors rules back to the original ones
+            print('3. Revert Cors Rules back to the original ones')
+            table_service.set_table_service_properties(cors=original_cors_rules)
+        
+        print("CORS sample completed")

+ 50 - 0
target/Azure/AddUp/table_advanced_samples_7.py

@@ -0,0 +1,50 @@
+def table_operations_with_sas(self, account):
+        table_name = 'sastable' + self.random_data.get_random_name(6)
+        
+        try:
+            # Create a Table Service object
+            table_service = account.create_table_service()
+            
+            print('1. Create table with name - ' + table_name)
+            table_service.create_table(table_name)
+            
+            # Create a Shared Access Signature for the table
+            print('2. Get sas for table')
+            
+            table_sas = table_service.generate_table_shared_access_signature(
+                table_name, 
+                TablePermissions.QUERY + TablePermissions.ADD + TablePermissions.UPDATE + TablePermissions.DELETE, 
+                datetime.datetime.utcnow() + datetime.timedelta(hours=1))
+
+            shared_account = TableStorageAccount(account_name=account.account_name, sas_token=table_sas, endpoint_suffix=account.endpoint_suffix)
+            shared_table_service = shared_account.create_table_service()
+
+            # Create a sample entity to insert into the table
+            customer = {'PartitionKey': 'Harp', 'RowKey': '1', 'email' : 'harp@contoso.com', 'phone' : '555-555-5555'}
+
+            # Insert the entity into the table
+            print('3. Insert new entity into table with sas - ' + table_name)
+            shared_table_service.insert_entity(table_name, customer)
+            
+            # Demonstrate how to query the entity
+            print('4. Read the inserted entity with sas.')
+            entity = shared_table_service.get_entity(table_name, 'Harp', '1')
+            
+            print(entity['email'])
+            print(entity['phone'])
+
+            # Demonstrate how to update the entity by changing the phone number
+            print('5. Update an existing entity by changing the phone number with sas')
+            customer = {'PartitionKey': 'Harp', 'RowKey': '1', 'email' : 'harp@contoso.com', 'phone' : '425-123-1234'}
+            shared_table_service.update_entity(table_name, customer)
+
+            # Demonstrate how to delete an entity
+            print('6. Delete the entity with sas')
+            shared_table_service.delete_entity(table_name, 'Harp', '1')
+
+        finally:
+            print('7. Delete table')
+            if(table_service.exists(table_name)):
+                table_service.delete_table(table_name)
+            
+        print("Table operations with sas completed")

+ 58 - 0
target/Azure/AddUp/table_basic_samples_2.py

@@ -0,0 +1,58 @@
+def run_all_samples(self, account):
+        print('Azure Storage Basic Table samples - Starting.')
+        table_name = 'tablebasics' + self.random_data.get_random_name(6)
+        table_service = None
+        try:
+            table_service = account.create_table_service()
+
+            # Create a new table
+            print('Create a table with name - ' + table_name)
+
+            try:
+                table_service.create_table(table_name)
+            except Exception as err:
+                print('Error creating table, ' + table_name + ', check if it already exists')
+ 
+            # Create a sample entity to insert into the table
+            customer = {'PartitionKey': 'Harp', 'RowKey': '1', 'email' : 'harp@contoso.com', 'phone' : '555-555-5555'}
+
+            # Insert the entity into the table
+            print('Inserting a new entity into table - ' + table_name)
+            table_service.insert_entity(table_name, customer)
+            print('Successfully inserted the new entity')
+
+            # Demonstrate how to query the entity
+            print('Read the inserted entity.')
+            entity = table_service.get_entity(table_name, 'Harp', '1')
+            print(entity['email'])
+            print(entity['phone'])
+
+            # Demonstrate how to update the entity by changing the phone number
+            print('Update an existing entity by changing the phone number')
+            customer = {'PartitionKey': 'Harp', 'RowKey': '1', 'email' : 'harp@contoso.com', 'phone' : '425-123-1234'}
+            table_service.update_entity(table_name, customer)
+
+            # Demonstrate how to query the updated entity, filter the results with a filter query and select only the value in the phone column
+            print('Read the updated entity with a filter query')
+            entities = table_service.query_entities(table_name, filter="PartitionKey eq 'Harp'", select='phone')
+            for entity in entities:
+                print(entity['phone'])
+
+            # Demonstrate how to delete an entity
+            print('Delete the entity')
+            table_service.delete_entity(table_name, 'Harp', '1')
+            print('Successfully deleted the entity')
+
+        except Exception as e:
+            if (config.IS_EMULATED):
+                print('Error occurred in the sample. If you are using the emulator, please make sure the emulator is running.', e)
+            else: 
+                print('Error occurred in the sample. Please make sure the account name and key are correct.', e)
+        finally:
+            # Demonstrate deleting the table, if you don't want to have the table deleted comment the below block of code
+            print('Deleting the table.')
+            if(table_service.exists(table_name)):
+                table_service.delete_table(table_name)
+            print('Successfully deleted the table')
+
+        print('\nAzure Storage Basic Table samples - Completed.\n')

+ 32 - 0
target/Azure/DLfile_6.py

@@ -0,0 +1,32 @@
+def upload_files():
+ adl = core.AzureDLFileSystem(adlCreds, store_name=config.store_name)
+ uploadedFolders = adl.ls(adls_upload_folder_path)
+ 
+ uploadedFolders = set([folder.replace(adls_upload_folder_path[1:], "")+"/" for folder in uploadedFolders])
+ 
+ local_folders = glob.glob(local_upload_folder_path+"*") # * matches everything; use e.g. *.csv for a specific format
+ local_folders = set([d.replace(local_upload_folder_path, "")+"/" for d in local_folders])
+
+ to_upload_folders = local_folders.difference(uploadedFolders)
+
+ folder_names = sorted([d.replace(local_upload_folder_path, "") for d in to_upload_folders])
+
+ files = []
+ for folder in folder_names:
+  path = local_upload_folder_path+folder
+  for f in listdir(path):
+   if isfile(join(path, f)):
+    files.append(folder+f)
+
+
+ print("Uploading the following folders:<br>{}<br>Total number of files to upload:<br>{}".format(", ". join(folder_names), len(files)))
+ 
+
+ for f in files:
+  adl.put(local_upload_folder_path+f, adls_upload_folder_path+f)
+    
+
+ print("Upload finished.")
+ time.sleep(2)
+ global uploaded_files
+ uploaded_files = True
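
upload_files() depends on adlCreds, config, and the upload-path globals, none of which appear in this hunk. A hedged sketch of the scaffolding it assumes (names and paths are illustrative):

import glob
import time
from os import listdir
from os.path import isfile, join
from azure.datalake.store import core, lib

# Service-principal login for Azure Data Lake Store (placeholder credentials).
adlCreds = lib.auth(tenant_id='<tenant id>', client_id='<app id>',
                    client_secret='<app secret>', resource='https://datalake.azure.net/')
adls_upload_folder_path = '/raw/uploads/'   # remote ADLS folder (hypothetical)
local_upload_folder_path = './uploads/'     # local staging folder (hypothetical)
uploaded_files = False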

+ 12 - 0
target/Azure/add_azure_account_1.py

@@ -0,0 +1,12 @@
+def create_azure_account(env, admin_api_key, account_name, azure_ad_id, azure_app_id, azure_api_access_key, azure_subscription_id):
+	"""
+	Creates an Azure Account in CloudCheckr. It will populate it with azure subscription credentials that were provided.
+	"""
+
+	api_url = env + "/api/account.json/add_azure_inventory_account"
+
+	add_azure_account_info = json.dumps({"account_name": account_name, "azure_ad_id": azure_ad_id, "azure_app_id": azure_app_id, "azure_api_access_key": azure_api_access_key, "azure_subscription_id": azure_subscription_id})
+
+	r7 = requests.post(api_url, headers = {"Content-Type": "application/json", "access_key": admin_api_key}, data = add_azure_account_info)
+
+	print(r7.json())

+ 16 - 0
target/Azure/add_azure_account_and_set_role_assignment_1.py

@@ -0,0 +1,16 @@
+def create_azure_account(env, CloudCheckrApiKey, account_name, AzureDirectoryId, AzureCloudCheckrApplicationId,
+                         AzureCloudCheckrApplicationSecret, AzureSubscriptionId):
+    """
+    Creates an Azure Account in CloudCheckr. It will populate it with azure subscription credentials that were provided.
+    """
+
+    api_url = env + "/api/account.json/add_azure_inventory_account"
+
+    add_azure_account_info = json.dumps(
+        {"account_name": account_name, "azure_ad_id": AzureDirectoryId, "azure_app_id": AzureCloudCheckrApplicationId,
+         "azure_api_access_key": AzureCloudCheckrApplicationSecret, "azure_subscription_id": AzureSubscriptionId})
+
+    r7 = requests.post(api_url, headers={"Content-Type": "application/json", "access_key": CloudCheckrApiKey},
+                       data=add_azure_account_info)
+
+    print(r7.json())

+ 18 - 0
target/Azure/add_azure_account_and_set_role_assignment_2.py

@@ -0,0 +1,18 @@
+def get_azure_reader_role_id(AzureApiBearerToken, AzureSubscriptionId):
+    """
+    Gets the id of the reader role for this subscription.
+
+    https://docs.microsoft.com/en-us/rest/api/authorization/roleassignments/list
+    """
+
+    api_url = "https://management.azure.com/subscriptions/" + AzureSubscriptionId + "/providers/Microsoft.Authorization/roleDefinitions?api-version=2015-07-01&$filter=roleName eq 'Reader'"
+    authorization_value = "Bearer " + AzureApiBearerToken
+
+    response = requests.get(api_url, headers={"Authorization": authorization_value})
+
+    if "value" in response.json():
+        value = (response.json()["value"])[0]
+        if "id" in value:
+            return value["id"]
+    print("Failed to get the Azure Reader Role Id")
+    return None

+ 20 - 0
target/Azure/add_azure_account_and_set_role_assignment_3.py

@@ -0,0 +1,20 @@
+def get_azure_cloudcheckr_service_principal_id(AzureGraphApiBearerToken, AzureCloudCheckrApplicationName):
+    """
+    Gets the service principal id of the Azure Application that was specifically created for CloudCheckr.
+    Note: This is not the application id. The service principal id is required for the role assignment.
+    This uses the Microsoft Graph API.
+
+    https://docs.microsoft.com/en-us/graph/api/serviceprincipal-list?view=graph-rest-1.0&tabs=http
+    """
+
+    api_url = "https://graph.microsoft.com/v1.0/servicePrincipals?$filter=displayName eq '" + AzureCloudCheckrApplicationName + "'"
+    authorization_value = "Bearer " + AzureGraphApiBearerToken
+
+    response = requests.get(api_url, headers={"Authorization": authorization_value})
+
+    if "value" in response.json():
+        value = (response.json()["value"])[0]
+        if ("id" in value) and ("appId" in value):
+            return value["id"], value["appId"]
+    print("Failed to get the Azure CloudCheckr Application Service principal Id")
+    return None

+ 26 - 0
target/Azure/add_azure_account_and_set_role_assignment_4.py

@@ -0,0 +1,26 @@
+def set_azure_cloudcheckr_application_service_assignment(AzureApiBearerToken, AzureReaderRoleId,
+                                                         AzureCloudCheckrApplicationServicePrincipalId,
+                                                         AzureSubscriptionId):
+    """
+    Sets the previously created CloudCheckr application to have a reader role assignment.
+
+    https://docs.microsoft.com/en-us/azure/role-based-access-control/role-assignments-rest
+    """
+
+    RoleAssignmentId = str(uuid.uuid1())
+
+    api_url = "https://management.azure.com/subscriptions/" + AzureSubscriptionId + "/providers/Microsoft.Authorization/roleAssignments/" + RoleAssignmentId + "?api-version=2015-07-01"
+    authorization_value = "Bearer " + AzureApiBearerToken
+    role_assignment_data = json.dumps({"properties": {"principalId": AzureCloudCheckrApplicationServicePrincipalId,
+                                                      "roleDefinitionId": AzureReaderRoleId}})
+
+    response = requests.put(api_url, headers={"Authorization": authorization_value, "Content-Type": "application/json"},
+                            data=role_assignment_data)
+    print(response.json())
+
+    if "properties" in response.json():
+        properties = response.json()["properties"]
+        if "roleDefinitionId" in properties:
+            return properties["roleDefinitionId"]
+    print("Failed to set role assignment for the CloudCheckr Application to the specified subscription")
+    return None

+ 20 - 0
target/Azure/add_azure_account_and_set_role_assignment_5.py

@@ -0,0 +1,20 @@
+def get_azure_bearer_token(resource_url, azure_directory_id, azure_admin_application_id,
+                           azure_admin_application_secret):
+    """
+    Uses OAuth 2.0 to get the bearer token based on the client id and client secret.
+    """
+
+    api_url = "https://login.microsoftonline.com/" + azure_directory_id + "/oauth2/token"
+
+    client = {'grant_type': 'client_credentials',
+              'client_id': azure_admin_application_id,
+              'client_secret': azure_admin_application_secret,
+              'resource': resource_url,
+              }
+
+    response = requests.post(api_url, data=client)
+
+    if "access_token" in response.json():
+        return response.json()["access_token"]
+    print("Could not get Bearer token")
+    return None

+ 63 - 0
target/Azure/add_azure_account_and_set_role_assignment_6.py

@@ -0,0 +1,63 @@
+def main():
+    try:
+        CloudCheckrApiKey = str(sys.argv[1])
+    except IndexError:
+        print("Must include an admin api key in the command line")
+        return
+
+    try:
+        NameOfCloudCheckrAccount = str(sys.argv[2])
+    except IndexError:
+        print("Must include a cloudcheckr account name")
+        return
+
+    try:
+        AzureDirectoryId = str(sys.argv[3])
+    except IndexError:
+        print("Must include an Azure Directory Id")
+        return
+
+    try:
+        AzureSubscriptionId = str(sys.argv[4])
+    except IndexError:
+        print("Must include an Azure Subscription Id")
+        return
+
+    try:
+        AzureAdminApplicationId = str(sys.argv[5])
+    except IndexError:
+        print("Must include an Azure Admin ApplictApi Id")
+        return
+
+    try:
+        AzureAdminApplicationSecret = str(sys.argv[6])
+    except IndexError:
+        print("Must include an Azure Admin Application Secret")
+        return
+
+    try:
+        AzureCloudCheckrApplicationName = str(sys.argv[7])
+    except IndexError:
+        print("Must include an Azure CloudCheckr Application Name")
+        return
+
+    try:
+        AzureCloudCheckrApplicationSecret = str(sys.argv[8])
+    except IndexError:
+        print("Must include an Azure CloudCheckr Application Secret")
+        return
+
+    env = "https://glacier.cloudcheckr.com"
+
+    AzureApiBearerToken = get_azure_bearer_token("https://management.azure.com/", AzureDirectoryId,
+                                                 AzureAdminApplicationId, AzureAdminApplicationSecret)
+    AzureGraphApiBearerToken = get_azure_bearer_token("https://graph.microsoft.com/", AzureDirectoryId,
+                                                      AzureAdminApplicationId, AzureAdminApplicationSecret)
+    AzureReaderRoleId = get_azure_reader_role_id(AzureApiBearerToken, AzureSubscriptionId)
+    AzureCloudCheckrApplicationServicePrincipalId, AzureCloudCheckrApplicationId = get_azure_cloudcheckr_service_principal_id(
+        AzureGraphApiBearerToken, AzureCloudCheckrApplicationName)
+    set_azure_cloudcheckr_application_service_assignment(AzureApiBearerToken, AzureReaderRoleId,
+                                                         AzureCloudCheckrApplicationServicePrincipalId,
+                                                         AzureSubscriptionId)
+    create_azure_account(env, CloudCheckrApiKey, NameOfCloudCheckrAccount, AzureDirectoryId,
+                         AzureCloudCheckrApplicationId, AzureCloudCheckrApplicationSecret, AzureSubscriptionId)
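
main() expects eight positional command-line arguments in this order; a usage example, assuming the fragments were split from a script named add_azure_account_and_set_role_assignment.py (all values are placeholders):

python add_azure_account_and_set_role_assignment.py <CloudCheckrApiKey> <AccountName> <AzureDirectoryId> <AzureSubscriptionId> <AzureAdminApplicationId> <AzureAdminApplicationSecret> <AzureCloudCheckrApplicationName> <AzureCloudCheckrApplicationSecret>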

+ 3 - 0
target/Azure/adls_2.py

@@ -0,0 +1,3 @@
+def execute(self, context: "Context") -> Any:
+        hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)
+        return hook.remove(path=self.path, recursive=self.recursive, ignore_not_found=self.ignore_not_found)

+ 4 - 0
target/Azure/adls_4.py

@@ -0,0 +1,4 @@
+def execute(self, context: "Context") -> list:
+        hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id)
+        self.log.info('Getting list of ADLS files in path: %s', self.path)
+        return hook.list(path=self.path)

+ 10 - 0
target/Azure/azure_clients_1.py

@@ -0,0 +1,10 @@
+def get_resourcegroup_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    resourcegroup_client = ResourceManagementClient(token_credential, subscription_id)
+    return resourcegroup_client

+ 11 - 0
target/Azure/azure_clients_2.py

@@ -0,0 +1,11 @@
+def get_compute_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    compute_client = ComputeManagementClient(token_credential,
+                                             subscription_id)
+    return compute_client

+ 11 - 0
target/Azure/azure_clients_3.py

@@ -0,0 +1,11 @@
+def get_network_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    network_client = NetworkManagementClient(token_credential,
+                                             subscription_id)
+    return network_client

+ 11 - 0
target/Azure/azure_clients_4.py

@@ -0,0 +1,11 @@
+def get_dns_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    dns_client = PrivateDnsManagementClient(token_credential,
+                                            subscription_id)
+    return dns_client

+ 11 - 0
target/Azure/azure_clients_5.py

@@ -0,0 +1,11 @@
+def get_dns_ops_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    dns_ops_client = DnsManagementClient(token_credential,
+                                            subscription_id)
+    return dns_ops_client

+ 11 - 0
target/Azure/azure_clients_6.py

@@ -0,0 +1,11 @@
+def get_blob_service_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    account_name = parameters.get('storage_account_name')
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    blob_service_client = BlobServiceClient(
+        account_url="https://%s.blob.core.windows.net" % account_name,
+        credential=token_credential)
+    return blob_service_client

+ 11 - 0
target/Azure/azure_clients_7.py

@@ -0,0 +1,11 @@
+def get_queue_service_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    account_name = parameters.get('storage_account_name')
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    queue_service_client = QueueServiceClient(
+        account_url="https://%s.queue.core.windows.net" % account_name,
+        credential=token_credential)
+    return queue_service_client

+ 13 - 0
target/Azure/azure_clients_8.py

@@ -0,0 +1,13 @@
+def get_datalake_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+    credentials = ServicePrincipalCredentials(
+        client_id=client_id,
+        secret=secret,
+        tenant=tenant_id)
+
+    datalake_client = DataLakeStoreAccountManagementClient(credentials,
+                                                           subscription_id)
+    return datalake_client

+ 11 - 0
target/Azure/azure_clients_9.py

@@ -0,0 +1,11 @@
+def get_storage_client(parameters):
+    tenant_id = parameters.get('azure_tenant_id')
+    client_id = parameters.get('azure_client_id')
+    secret = parameters.get('azure_client_secret')
+    subscription_id = parameters.get('azure_subscription_id')
+
+    token_credential = ClientSecretCredential(
+        tenant_id, client_id, secret)
+    storage_client = StorageManagementClient(token_credential,
+                                             subscription_id)
+    return storage_client
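All of these client factory helpers read the same keys from a plain dict; a minimal sketch of the expected parameters input, with placeholder values:

    parameters = {
        'azure_tenant_id': '<tenant-guid>',
        'azure_client_id': '<app-registration-client-id>',
        'azure_client_secret': '<client-secret>',
        'azure_subscription_id': '<subscription-guid>',
        'storage_account_name': '<storage-account>',  # only needed by the blob/queue factories
    }
    storage_client = get_storage_client(parameters)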

+ 25 - 0
target/Azure/azure_rm_14.py

@@ -0,0 +1,25 @@
+def get_inventory(self):
+        if len(self.resource_groups) > 0:
+            # get VMs for requested resource groups
+            for resource_group in self.resource_groups:
+                try:
+                    virtual_machines = self._compute_client.virtual_machines.list(resource_group)
+                except Exception as exc:
+                    sys.exit("Error: fetching virtual machines for resource group {0} - {1}".format(resource_group, str(exc)))
+                if self._args.host or self.tags:
+                    selected_machines = self._selected_machines(virtual_machines)
+                    self._load_machines(selected_machines)
+                else:
+                    self._load_machines(virtual_machines)
+        else:
+            # get all VMs within the subscription
+            try:
+                virtual_machines = self._compute_client.virtual_machines.list_all()
+            except Exception as exc:
+                sys.exit("Error: fetching virtual machines - {0}".format(str(exc)))
+
+            if self._args.host or self.tags or self.locations:
+                selected_machines = self._selected_machines(virtual_machines)
+                self._load_machines(selected_machines)
+            else:
+                self._load_machines(virtual_machines)

+ 98 - 0
target/Azure/azure_rm_15.py

@@ -0,0 +1,98 @@
+def _load_machines(self, machines):
+        for machine in machines:
+            id_dict = azure_id_to_dict(machine.id)
+
+            # TODO - The API is returning an ID value containing resource group name in ALL CAPS. If/when it gets
+            #       fixed, we should remove the .lower(). Opened Issue
+            #       #574: https://github.com/Azure/azure-sdk-for-python/issues/574
+            resource_group = id_dict['resourceGroups'].lower()
+
+            if self.group_by_security_group:
+                self._get_security_groups(resource_group)
+
+            host_vars = dict(
+                ansible_host=None,
+                private_ip=None,
+                private_ip_alloc_method=None,
+                public_ip=None,
+                public_ip_name=None,
+                public_ip_id=None,
+                public_ip_alloc_method=None,
+                fqdn=None,
+                location=machine.location,
+                name=machine.name,
+                type=machine.type,
+                id=machine.id,
+                tags=machine.tags,
+                network_interface_id=None,
+                network_interface=None,
+                resource_group=resource_group,
+                mac_address=None,
+                plan=(machine.plan.name if machine.plan else None),
+                virtual_machine_size=machine.hardware_profile.vm_size,
+                computer_name=(machine.os_profile.computer_name if machine.os_profile else None),
+                provisioning_state=machine.provisioning_state,
+            )
+
+            host_vars['os_disk'] = dict(
+                name=machine.storage_profile.os_disk.name,
+                operating_system_type=machine.storage_profile.os_disk.os_type.value
+            )
+
+            if self.include_powerstate:
+                host_vars['powerstate'] = self._get_powerstate(resource_group, machine.name)
+
+            if machine.storage_profile.image_reference:
+                host_vars['image'] = dict(
+                    offer=machine.storage_profile.image_reference.offer,
+                    publisher=machine.storage_profile.image_reference.publisher,
+                    sku=machine.storage_profile.image_reference.sku,
+                    version=machine.storage_profile.image_reference.version
+                )
+
+            # Add windows details
+            if machine.os_profile is not None and machine.os_profile.windows_configuration is not None:
+                host_vars['windows_auto_updates_enabled'] = \
+                    machine.os_profile.windows_configuration.enable_automatic_updates
+                host_vars['windows_timezone'] = machine.os_profile.windows_configuration.time_zone
+                host_vars['windows_rm'] = None
+                if machine.os_profile.windows_configuration.win_rm is not None:
+                    host_vars['windows_rm'] = dict(listeners=None)
+                    if machine.os_profile.windows_configuration.win_rm.listeners is not None:
+                        host_vars['windows_rm']['listeners'] = []
+                        for listener in machine.os_profile.windows_configuration.win_rm.listeners:
+                            host_vars['windows_rm']['listeners'].append(dict(protocol=listener.protocol,
+                                                                             certificate_url=listener.certificate_url))
+
+            for interface in machine.network_profile.network_interfaces:
+                interface_reference = self._parse_ref_id(interface.id)
+                network_interface = self._network_client.network_interfaces.get(
+                    interface_reference['resourceGroups'],
+                    interface_reference['networkInterfaces'])
+                if network_interface.primary:
+                    if self.group_by_security_group and \
+                       self._security_groups[resource_group].get(network_interface.id, None):
+                        host_vars['security_group'] = \
+                            self._security_groups[resource_group][network_interface.id]['name']
+                        host_vars['security_group_id'] = \
+                            self._security_groups[resource_group][network_interface.id]['id']
+                    host_vars['network_interface'] = network_interface.name
+                    host_vars['network_interface_id'] = network_interface.id
+                    host_vars['mac_address'] = network_interface.mac_address
+                    for ip_config in network_interface.ip_configurations:
+                        host_vars['private_ip'] = ip_config.private_ip_address
+                        host_vars['private_ip_alloc_method'] = ip_config.private_ip_allocation_method
+                        if ip_config.public_ip_address:
+                            public_ip_reference = self._parse_ref_id(ip_config.public_ip_address.id)
+                            public_ip_address = self._network_client.public_ip_addresses.get(
+                                public_ip_reference['resourceGroups'],
+                                public_ip_reference['publicIPAddresses'])
+                            host_vars['ansible_host'] = public_ip_address.ip_address
+                            host_vars['public_ip'] = public_ip_address.ip_address
+                            host_vars['public_ip_name'] = public_ip_address.name
+                            host_vars['public_ip_alloc_method'] = public_ip_address.public_ip_allocation_method
+                            host_vars['public_ip_id'] = public_ip_address.id
+                            if public_ip_address.dns_settings:
+                                host_vars['fqdn'] = public_ip_address.dns_settings.fqdn
+
+            self._add_host(host_vars)

+ 59 - 0
target/Azure/azure_rm_2.py

@@ -0,0 +1,59 @@
+def __init__(self, args):
+        self._args = args
+        self._cloud_environment = None
+        self._compute_client = None
+        self._resource_client = None
+        self._network_client = None
+
+        self.debug = False
+        if args.debug:
+            self.debug = True
+
+        self.credentials = self._get_credentials(args)
+        if not self.credentials:
+            self.fail("Failed to get credentials. Either pass as parameters, set environment variables, "
+                      "or define a profile in ~/.azure/credentials.")
+
+        # if cloud_environment specified, look up/build Cloud object
+        raw_cloud_env = self.credentials.get('cloud_environment')
+        if not raw_cloud_env:
+            self._cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD  # SDK default
+        else:
+            # try to look up "well-known" values via the name attribute on azure_cloud members
+            all_clouds = [x[1] for x in inspect.getmembers(azure_cloud) if isinstance(x[1], azure_cloud.Cloud)]
+            matched_clouds = [x for x in all_clouds if x.name == raw_cloud_env]
+            if len(matched_clouds) == 1:
+                self._cloud_environment = matched_clouds[0]
+            elif len(matched_clouds) > 1:
+                self.fail("Azure SDK failure: more than one cloud matched for cloud_environment name '{0}'".format(raw_cloud_env))
+            else:
+                if not urlparse.urlparse(raw_cloud_env).scheme:
+                    self.fail("cloud_environment must be an endpoint discovery URL or one of {0}".format([x.name for x in all_clouds]))
+                try:
+                    self._cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(raw_cloud_env)
+                except Exception as e:
+                    self.fail("cloud_environment {0} could not be resolved: {1}".format(raw_cloud_env, e.message))
+
+        if self.credentials.get('subscription_id', None) is None:
+            self.fail("Credentials did not include a subscription_id value.")
+        self.log("setting subscription_id")
+        self.subscription_id = self.credentials['subscription_id']
+
+        if self.credentials.get('client_id') is not None and \
+           self.credentials.get('secret') is not None and \
+           self.credentials.get('tenant') is not None:
+            self.azure_credentials = ServicePrincipalCredentials(client_id=self.credentials['client_id'],
+                                                                 secret=self.credentials['secret'],
+                                                                 tenant=self.credentials['tenant'],
+                                                                 cloud_environment=self._cloud_environment)
+        elif self.credentials.get('ad_user') is not None and self.credentials.get('password') is not None:
+            tenant = self.credentials.get('tenant')
+            if not tenant:
+                tenant = 'common'
+            self.azure_credentials = UserPassCredentials(self.credentials['ad_user'],
+                                                         self.credentials['password'],
+                                                         tenant=tenant,
+                                                         cloud_environment=self._cloud_environment)
+        else:
+            self.fail("Failed to authenticate with provided credentials. Some attributes were missing. "
+                      "Credentials must include client_id, secret and tenant or ad_user and password.")

+ 11 - 0
target/Azure/azure_rm_9.py

@@ -0,0 +1,11 @@
+def network_client(self):
+        self.log('Getting network client')
+        if not self._network_client:
+            self._network_client = NetworkManagementClient(
+                self.azure_credentials,
+                self.subscription_id,
+                base_url=self._cloud_environment.endpoints.resource_manager,
+                api_version='2017-06-01'
+            )
+            self._register('Microsoft.Network')
+        return self._network_client

+ 17 - 0
target/Azure/azure_rm_aks_facts_4.py

@@ -0,0 +1,17 @@
+def list_items(self):
+        """Get all Azure Kubernetes Services"""
+
+        self.log('List all Azure Kubernetes Services')
+
+        try:
+            response = self.containerservice_client.managed_clusters.list(
+                self.resource_group)
+        except AzureHttpError as exc:
+            self.fail('Failed to list all items - {0}'.format(str(exc)))
+
+        results = []
+        for item in response:
+            if self.has_tags(item.tags, self.tags):
+                results.append(self.serialize_obj(item, AZURE_OBJECT_CLASS))
+
+        return results

+ 32 - 0
target/Azure/azure_service_principal_attribute_1.py

@@ -0,0 +1,32 @@
+def run(self, terms, variables, **kwargs):
+
+        self.set_options(direct=kwargs)
+
+        credentials = {}
+        credentials['azure_client_id'] = self.get_option('azure_client_id', None)
+        credentials['azure_secret'] = self.get_option('azure_secret', None)
+        credentials['azure_tenant'] = self.get_option('azure_tenant', 'common')
+
+        if credentials['azure_client_id'] is None or credentials['azure_secret'] is None:
+            raise AnsibleError("Must specify azure_client_id and azure_secret")
+
+        _cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD
+        azure_cloud_environment = self.get_option('azure_cloud_environment', None)
+        if azure_cloud_environment is not None:
+            _cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(azure_cloud_environment)
+
+        try:
+            azure_credentials = ServicePrincipalCredentials(client_id=credentials['azure_client_id'],
+                                                            secret=credentials['azure_secret'],
+                                                            tenant=credentials['azure_tenant'],
+                                                            resource=_cloud_environment.endpoints.active_directory_graph_resource_id)
+
+            client = GraphRbacManagementClient(azure_credentials, credentials['azure_tenant'],
+                                               base_url=_cloud_environment.endpoints.active_directory_graph_resource_id)
+
+            response = list(client.service_principals.list(filter="appId eq '{0}'".format(credentials['azure_client_id'])))
+            sp = response[0]
+
+            return sp.object_id.split(',')
+        except CloudError as ex:
+            raise AnsibleError("Failed to get service principal object id: %s" % to_native(ex))
+        return False

+ 7 - 0
target/Azure/azure_storage_11.py

@@ -0,0 +1,7 @@
+def size(self, name):
+        """
+        :param name:
+        :rtype: int
+        """
+        blob = self.connection.get_blob_properties(self.azure_container, name)
+        return blob.properties.content_length

+ 28 - 0
target/Azure/azure_storage_12.py

@@ -0,0 +1,28 @@
+def _save(self, name, content):
+        """
+        :param name:
+        :param File content:
+        :return:
+        """
+        original_name = name.get("original_name")
+        blob_file_name = datetime.now().strftime("%Y%m%d-%H:%M:%S.%f_") + original_name
+        # blob_name = "{}.{}".format(name.get("uuid"), original_name.partition(".")[-1])
+
+        if hasattr(content.file, 'content_type'):
+            content_type = content.file.content_type
+        else:
+            content_type = mimetypes.guess_type(original_name)[0]
+
+        if hasattr(content, 'chunks'):
+            content_data = b''.join(chunk for chunk in content.chunks())
+        else:
+            content_data = content.read()
+
+        print(f'Saving blob: container={self.azure_container}, blob={blob_file_name}')
+        blob_client = self.connection.get_blob_client(container=self.azure_container, blob=blob_file_name)
+        obj = blob_client.upload_blob(content_data)
+        # create_blob_from_bytes(self.azure_container, name, content_data,
+        #
+        #                                        content_settings=ContentSettings(content_type=content_type))
+        af = AttachedFile(original_name, self.azure_container, blob_file_name)
+        return af

+ 15 - 0
target/Azure/azure_storage_5.py

@@ -0,0 +1,15 @@
+def connection(self):
+
+        if self._connection is None:
+            connect_str = setting("AZURE_STORAGE_CONNECTION_STRING")
+
+            # Create the BlobServiceClient object which will be used to create a container client
+            blob_service_client = BlobServiceClient.from_connection_string(connect_str)
+
+            # Create a unique name for the container
+            container_name = "pac-files"
+
+            # Create a blob client using the local file name as the name for the blob
+            self._connection = blob_service_client
+
+        return self._connection

+ 10 - 0
target/Azure/azure_storage_8.py

@@ -0,0 +1,10 @@
+def _open(self, container, name, mode="rb"):
+        """
+        :param str name: Filename
+        :param str mode:
+        :rtype: ContentFile
+        """
+        print(f'Retrieving blob: container={self.azure_container}, blob={name}')
+        blob_client = self.connection.get_blob_client(container=container, blob=name)
+        contents = blob_client.download_blob().readall()
+        return ContentFile(contents)

+ 25 - 0
target/Azure/azure_system_helpers_2.py

@@ -0,0 +1,25 @@
+def provide_azure_data_lake_default_connection(key_file_path: str):
+    """
+    Context manager that provides a temporary value for the azure_data_lake_default connection.
+    :param key_file_path: Path to the JSON file with azure_data_lake_default credentials.
+    """
+    required_fields = {'login', 'password', 'extra'}
+
+    if not key_file_path.endswith(".json"):
+        raise AirflowException("Use a JSON key file.")
+    with open(key_file_path) as credentials:
+        creds = json.load(credentials)
+    missing_keys = required_fields - creds.keys()
+    if missing_keys:
+        message = f"{missing_keys} fields are missing"
+        raise AirflowException(message)
+    conn = Connection(
+        conn_id=DATA_LAKE_CONNECTION_ID,
+        conn_type=DATA_LAKE_CONNECTION_TYPE,
+        host=creds.get("host", None),
+        login=creds.get("login", None),
+        password=creds.get("password", None),
+        extra=json.dumps(creds.get('extra', None)),
+    )
+    with patch_environ({f"AIRFLOW_CONN_{conn.conn_id.upper()}": conn.get_uri()}):
+        yield
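Given the required_fields check, the key file is expected to be JSON roughly along these lines (a sketch; host is optional and the exact extra payload depends on the Data Lake connection being tested):

    {
        "host": "example.azuredatalakestore.net",
        "login": "<client-id>",
        "password": "<client-secret>",
        "extra": {"tenant": "<tenant-id>", "account_name": "example"}
    }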

+ 9 - 0
target/Azure/azure_system_helpers_3.py

@@ -0,0 +1,9 @@
+def provide_azure_fileshare(share_name: str, azure_fileshare_conn_id: str, file_name: str, directory: str):
+    AzureSystemTest.prepare_share(
+        share_name=share_name,
+        azure_fileshare_conn_id=azure_fileshare_conn_id,
+        file_name=file_name,
+        directory=directory,
+    )
+    yield
+    AzureSystemTest.delete_share(share_name=share_name, azure_fileshare_conn_id=azure_fileshare_conn_id)

+ 3 - 0
target/Azure/azure_system_helpers_4.py

@@ -0,0 +1,3 @@
+def create_share(cls, share_name: str, azure_fileshare_conn_id: str):
+        hook = AzureFileShareHook(azure_fileshare_conn_id=azure_fileshare_conn_id)
+        hook.create_share(share_name)

+ 3 - 0
target/Azure/azure_system_helpers_5.py

@@ -0,0 +1,3 @@
+def delete_share(cls, share_name: str, azure_fileshare_conn_id: str):
+        hook = AzureFileShareHook(azure_fileshare_conn_id=azure_fileshare_conn_id)
+        hook.delete_share(share_name)

+ 3 - 0
target/Azure/azure_system_helpers_6.py

@@ -0,0 +1,3 @@
+def create_directory(cls, share_name: str, azure_fileshare_conn_id: str, directory: str):
+        hook = AzureFileShareHook(azure_fileshare_conn_id=azure_fileshare_conn_id)
+        hook.create_directory(share_name=share_name, directory_name=directory)

+ 15 - 0
target/Azure/azure_system_helpers_7.py

@@ -0,0 +1,15 @@
+def upload_file_from_string(
+        cls,
+        string_data: str,
+        share_name: str,
+        azure_fileshare_conn_id: str,
+        file_name: str,
+        directory: str,
+    ):
+        hook = AzureFileShareHook(azure_fileshare_conn_id=azure_fileshare_conn_id)
+        hook.load_string(
+            string_data=string_data,
+            share_name=share_name,
+            directory_name=directory,
+            file_name=file_name,
+        )

+ 16 - 0
target/Azure/azure_system_helpers_8.py

@@ -0,0 +1,16 @@
+def prepare_share(cls, share_name: str, azure_fileshare_conn_id: str, file_name: str, directory: str):
+        """
+        Create a share with a file in the given directory. If directory is None, the file is placed in the root directory.
+        """
+        cls.create_share(share_name=share_name, azure_fileshare_conn_id=azure_fileshare_conn_id)
+        cls.create_directory(
+            share_name=share_name, azure_fileshare_conn_id=azure_fileshare_conn_id, directory=directory
+        )
+        string_data = "".join(random.choice(string.ascii_letters) for _ in range(1024))
+        cls.upload_file_from_string(
+            string_data=string_data,
+            share_name=share_name,
+            azure_fileshare_conn_id=azure_fileshare_conn_id,
+            file_name=file_name,
+            directory=directory,
+        )

+ 22 - 0
target/Azure/blob-adapter_2.py

@@ -0,0 +1,22 @@
+def upload(self, file_dict):
+        upload_response = {}
+        for key in file_dict:
+            print("File Dict Key: [{}] value is: {}".format(key, file_dict[key]))
+            print("\nUploading to Azure Storage as blob:\n\t" + key)
+
+            self.blob_client = self.blob_service_client.get_blob_client(container=self.get_config('container_name'), blob=key)
+            with open(file_dict[key], "rb") as data:
+                try:
+                    self.blob_client.upload_blob(data)
+                    print('File: Uploaded Successfully: {}'.format(key))
+                    upload_response[key] = 'Successfully Uploaded'
+                except ResourceExistsError:
+                    print('File: NOT Uploaded Successfully: {}'.format(key))
+                    upload_response[key] = 'This Resource already exists'
+                    upload_response['Partial'] = True
+                    print('This Resource already exists')
+                    # return 'This Resource already exists'
+        print("Before Returning Response:")
+        print(jsonify(upload_response))
+        print("---------------")
+        return upload_response

+ 4 - 0
target/Azure/blob-adapter_3.py

@@ -0,0 +1,4 @@
+def get_blob_client(self, blob_name):
+        self.blob_client = self.blob_service_client.get_blob_client(
+            container=self.get_config('container_name'), blob=blob_name)
+        return self.blob_client

+ 10 - 0
target/Azure/blob-adapter_4.py

@@ -0,0 +1,10 @@
+def list_blobs(self):
+        print("\nList blobs in the container")
+        self.container_client = self.blob_service_client.get_container_client(
+            container=self.get_config('container_name'))
+        blob_list = self.container_client.list_blobs()
+        blobs = []
+        for blob in blob_list:
+            # print("\t Blob name: " + blob.name)
+            blobs.append(blob.name)
+        return blobs

+ 17 - 0
target/Azure/blob-permission_3.py

@@ -0,0 +1,17 @@
+def create_blob_link(self, blob_folder, blob_name) -> str:
+        if blob_folder:
+            full_path_blob = f"{blob_folder}/{blob_name}"
+        else:
+            full_path_blob = blob_name
+        url = f"https://{self.account_name}.blob.core.windows.net/{self.destination}/{full_path_blob}"
+        sas_token = generate_blob_sas(
+            account_name=self.account_name,
+            account_key=self.account_key,
+            container_name=self.destination,
+            blob_name=full_path_blob,
+            permission=BlobSasPermissions(read=True, delete_previous_version=False),
+            expiry=datetime.utcnow() + timedelta(days=self.expiry_download_links),
+        )
+
+        url_with_sas = f"{url}?{sas_token}"
+        return url_with_sas

+ 46 - 0
target/Azure/blob-upload-1_3.py

@@ -0,0 +1,46 @@
+def upload_single(self):
+        blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)
+        download_links = {}
+
+        for root, dirs, files in os.walk(self.folder):
+            for file in files:
+
+                full_path = os.path.join(root, file)
+
+                # ignore hidden files
+                if file.startswith("."):
+                    continue
+
+                # if list_files is given, only upload matched files
+                if self.list_files and file not in self.list_files:
+                    continue
+
+                # if extension is given only upload if extension is matched
+                if self.extension and os.path.isfile(full_path) and not file.lower().endswith(self.extension.lower()):
+                    continue
+
+                blob_folder = root.replace(self.folder, "").lstrip("/")
+
+                if self.blob_folder:
+                    # we only want to append blob_folder if it actually is a path or folder
+                    # blob_folder can be empty string ""
+                    if blob_folder:
+                        blob_folder = os.path.join(self.blob_folder, blob_folder)
+                    else:
+                        blob_folder = self.blob_folder
+
+                # if no folder is given, just upload to the container root path
+                if not blob_folder:
+                    container = self.destination
+                else:
+                    container = os.path.join(self.destination, blob_folder)
+                container_client = blob_service_client.get_container_client(container=container)
+
+                with open(full_path, "rb") as data:
+                    logging.debug(f"Uploading blob {full_path}")
+                    container_client.upload_blob(data=data, name=file, overwrite=self.overwrite)
+
+                if self.create_download_links:
+                    download_links[file] = self.create_blob_link(blob_folder=blob_folder, blob_name=file)
+
+        return download_links

+ 8 - 0
target/Azure/blob-upload-1_4.py

@@ -0,0 +1,8 @@
+def upload(self):
+        self.checks()
+
+        logging.info(f"Uploading to container {self.destination} with method = '{self.method}'.")
+        if self.method == "batch":
+            return self.upload_batch()
+        else:
+            return self.upload_single()

+ 12 - 0
target/Azure/blob-upload-2_4.py

@@ -0,0 +1,12 @@
+def upload_image(self, file_name):
+        # Create blob with same name as local file name
+        blob_client = self.blob_service_client.get_blob_client(container=MY_IMAGE_CONTAINER,
+                                                               blob=file_name)
+        # Get full path to the file
+        upload_file_path = os.path.join(LOCAL_IMAGE_PATH, file_name)
+        # Create blob on storage
+        # Overwrite if it already exists!
+        image_content_setting = ContentSettings(content_type='image/jpeg')
+        print(f"uploading file - {file_name}")
+        with open(upload_file_path, "rb") as data:
+            blob_client.upload_blob(data, overwrite=True, content_settings=image_content_setting)

+ 18 - 0
target/Azure/blob-upload-3_3.py

@@ -0,0 +1,18 @@
+def upldfile():
+    if request.method == 'POST':
+        file = request.files['file']
+        if file and allowed_file(file.filename):
+            filename = secure_filename(file.filename)
+            app.logger.info('FileName: ' + filename)
+            
+            block_blob_service = BlockBlobService(account_name=app.config['AZURE_STORAGE_ACCOUNT'], account_key=app.config['AZURE_STORAGE_KEY'])
+            block_blob_service.create_blob_from_bytes(
+                'doc',
+                filename,
+                file.read())
+            
+#             updir = os.path.join(basedir, 'upload/')
+#             file.save(os.path.join(updir, filename))
+#             file_size = os.path.getsize(os.path.join(updir, filename))
+            return jsonify(name=filename, url='https://'+app.config['AZURE_STORAGE_ACCOUNT']+'.blob.core.windows.net/' \
+                           +app.config['AZURE_STORAGE_CONTAINER']+'/'+filename)

+ 5 - 0
target/Azure/blob-upload-4_1.py

@@ -0,0 +1,5 @@
+def upload_file_to_blob(upload_file_path, target):  # file path -> file path
+    blob_client = blob_service_client.get_blob_client(container=container_name, blob=target)
+    print("\nUploading to Azure Storage as blob:\n\t" + upload_file_path)
+    with open(upload_file_path, "rb") as data:
+        blob_client.upload_blob(data)

+ 9 - 0
target/Azure/blob-upload-4_2.py

@@ -0,0 +1,9 @@
+def upload_directory_to_blob(upload_file_path, target): #directory name -> directory name
+    print("\nUploading directory to Azure Storage as blob:\n\t" + upload_file_path)
+    files = os.listdir(upload_file_path)
+    for dir in files:
+        file_name = upload_file_path + '/' + dir
+        target_ = target+ '/' + dir
+        blob_client = blob_service_client.get_blob_client(container=container_name, blob=target_)
+        with open(file_name, "rb") as data:
+            blob_client.upload_blob(data)

+ 6 - 0
target/Azure/blob-upload-4_3.py

@@ -0,0 +1,6 @@
+def download_file_from_blob(source, download_file_path):
+    blob_client = blob_service_client.get_blob_client(container=container_name, blob=source)
+    print("\nDownloading blob to \n\t from container" + download_file_path)
+
+    with open(download_file_path, "wb") as download_file:
+        download_file.write(blob_client.download_blob().readall())

+ 10 - 0
target/Azure/blob-upload-4_4.py

@@ -0,0 +1,10 @@
+def download_directory_from_blob(source, download_directory_path):
+    container_client = ContainerClient.from_connection_string(conn_str=connect_str, container_name=container_name)
+    print(f"\nDownloading all blobs from the following directory {source} in container {container_name}")
+    blob_list = container_client.list_blobs()
+    for blob in blob_list:
+        if source in blob.name:
+            blob_client = blob_service_client.get_blob_client(container=container_name, blob=blob.name)
+            os.makedirs(os.path.dirname(blob.name), exist_ok=True)
+            with open(blob.name, "wb") as download_file:
+                download_file.write(blob_client.download_blob().readall())

+ 58 - 0
target/Azure/blob-upload_1.py

@@ -0,0 +1,58 @@
+def run_sample():
+    try:
+        # Create the BlockBlobService that is used to call the Blob service for the storage account
+        blob_service_client = BlockBlobService(
+            account_name='accountname', account_key='accountkey')
+
+        # Create a container called 'quickstartblobs'.
+        container_name = 'quickstartblobs'
+        blob_service_client.create_container(container_name)
+
+        # Set the permission so the blobs are public.
+        blob_service_client.set_container_acl(
+            container_name, public_access=PublicAccess.Container)
+
+        # Create Sample folder if it not exists, and create a file in folder Sample to test the upload and download.
+        local_path = os.path.expanduser("~/Sample")
+        if not os.path.exists(local_path):
+            os.makedirs(os.path.expanduser("~/Sample"))
+        local_file_name = "QuickStart_" + str(uuid.uuid4()) + ".txt"
+        full_path_to_file = os.path.join(local_path, local_file_name)
+
+        # Write text to the file.
+        file = open(full_path_to_file,  'w')
+        file.write("Hello, World!")
+        file.close()
+
+        print("Temp file = " + full_path_to_file)
+        print("\nUploading to Blob storage as blob" + local_file_name)
+
+        # Upload the created file, use local_file_name for the blob name
+        blob_service_client.create_blob_from_path(
+            container_name, local_file_name, full_path_to_file)
+
+        # List the blobs in the container
+        print("\nList blobs in the container")
+        generator = blob_service_client.list_blobs(container_name)
+        for blob in generator:
+            print("\t Blob name: " + blob.name)
+
+        # Download the blob(s).
+        # Add '_DOWNLOADED' as prefix to '.txt' so you can see both files in Documents.
+        full_path_to_file2 = os.path.join(local_path, str.replace(
+            local_file_name ,'.txt', '_DOWNLOADED.txt'))
+        print("\nDownloading blob to " + full_path_to_file2)
+        blob_service_client.get_blob_to_path(
+            container_name, local_file_name, full_path_to_file2)
+
+        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
+                         "application will exit.")
+        sys.stdout.flush()
+        input()
+
+        # Clean up resources. This includes the container and the temp files
+        blob_service_client.delete_container(container_name)
+        os.remove(full_path_to_file)
+        os.remove(full_path_to_file2)
+    except Exception as e:
+        print(e)

+ 40 - 0
target/Azure/classAzureProvider_2.py

@@ -0,0 +1,40 @@
+def init(self):
+        globals.printdebug(f"DEBUG: Initializing Azure DevOps SCM Provider")
+
+        self.azure_base_url = os.getenv('SYSTEM_COLLECTIONURI')
+        self.azure_api_token = os.getenv('SYSTEM_ACCESSTOKEN')
+        if not self.azure_api_token:
+            self.azure_api_token = os.getenv('AZURE_API_TOKEN')
+        self.azure_pull_request_id = os.getenv('SYSTEM_PULLREQUEST_PULLREQUESTID')
+        self.azure_project = os.getenv('SYSTEM_TEAMPROJECT')
+        self.azure_project_id = os.getenv('SYSTEM_TEAMPROJECTID')
+        self.azure_repo_id = os.getenv('BUILD_REPOSITORY_ID')
+        self.azure_build_source_branch = os.getenv('BUILD_SOURCEBRANCH')
+
+        globals.printdebug(f'DEBUG: Azure DevOps base_url={self.azure_base_url} api_token={self.azure_api_token} '
+                           f'pull_request_id={self.azure_pull_request_id} project={self.azure_project} '
+                           f'project_id={self.azure_project_id} repo_id={self.azure_repo_id}')
+
+        if not self.azure_base_url or not self.azure_project or not self.azure_repo_id or not self.azure_api_token \
+                or not self.azure_project_id:
+            print(f'BD-Scan-Action: ERROR: Azure DevOps requires that SYSTEM_COLLECTIONURI, SYSTEM_TEAMPROJECT, '
+                  'SYSTEM_TEAMPROJECTID, SYSTEM_ACCESSTOKEN or AZURE_API_TOKEN, and BUILD_REPOSITORY_ID be set.')
+            sys.exit(1)
+
+        if globals.args.comment_on_pr and not self.azure_pull_request_id:
+            print(f'BD-Scan-Action: ERROR: Azure DevOps requires that SYSTEM_PULLREQUEST_PULLREQUESTID be set '
+                  'when operating on a pull request')
+            sys.exit(1)
+
+        if globals.args.fix_pr and not self.azure_build_source_branch:
+            print(f'BD-Scan-Action: ERROR: Azure DevOps requires that BUILD_SOURCEBRANCH be set '
+                  'when operating on a pull request')
+            sys.exit(1)
+
+        self.azure_credentials = BasicAuthentication('', self.azure_api_token)
+        self.azure_connection = Connection(base_url=self.azure_base_url, creds=self.azure_credentials)
+
+        # Get a client (the "core" client provides access to projects, teams, etc)
+        self.azure_git_client = self.azure_connection.clients.get_git_client()
+
+        return True

+ 30 - 0
target/Azure/classAzureProvider_3.py

@@ -0,0 +1,30 @@
+def azure_create_branch(self, from_ref, branch_name):
+        authorization = str(base64.b64encode(bytes(':' + self.azure_api_token, 'ascii')), 'ascii')
+
+        url = f"{self.azure_base_url}/_apis/git/repositories/{self.azure_repo_id}/refs?api-version=6.0"
+
+        headers = {
+            'Authorization': 'Basic ' + authorization
+        }
+
+        body = [
+            {
+                'name': f"refs/heads/{branch_name}",
+                'oldObjectId': '0000000000000000000000000000000000000000',
+                'newObjectId': from_ref
+            }
+        ]
+
+        if globals.debug > 0:
+            print("DEBUG: perform API Call to ADO: " + url + " : " + json.dumps(body, indent=4, sort_keys=True) + "\n")
+        r = requests.post(url, json=body, headers=headers)
+
+        if r.status_code == 200:
+            if globals.debug > 0:
+                print(f"DEBUG: Success creating branch")
+                print(r.text)
+            return True
+        else:
+            print(f"BD-Scan-Action: ERROR: Failure creating branch: Error {r.status_code}")
+            print(r.text)
+            return False

+ 81 - 0
target/Azure/classAzureProvider_4.py

@@ -0,0 +1,81 @@
+def comp_commit_file_and_create_fixpr(self, comp, files_to_patch):
+        if len(files_to_patch) == 0:
+            print('BD-Scan-Action: WARN: Unable to apply fix patch - cannot determine containing package file')
+            return False
+
+        new_branch_seed = '%030x' % random.randrange(16 ** 30)
+        new_branch_name = f"synopsys-enablement-{new_branch_seed}"
+
+        globals.printdebug(f"DEBUG: Get commit for head of {self.azure_build_source_branch}'")
+
+        commits = self.azure_git_client.get_commits(self.azure_repo_id, None)
+        head_commit = commits[0]
+
+        globals.printdebug(f"DEBUG: Head commit={head_commit.commit_id}")
+
+        globals.printdebug(f"DEBUG: Creating new ref 'refs/heads/{new_branch_name}'")
+        self.azure_create_branch(head_commit.commit_id, new_branch_name)
+
+        gitRefUpdate = GitRefUpdate()
+        gitRefUpdate.name = f"refs/heads/{new_branch_name}"
+        gitRefUpdate.old_object_id = head_commit.commit_id
+
+        gitPush = GitPush()
+        gitPush.commits = []
+        gitPush.ref_updates = [gitRefUpdate]
+
+        # for file_to_patch in globals.files_to_patch:
+        for pkgfile in files_to_patch:
+            globals.printdebug(f"DEBUG: Upload file '{pkgfile}'")
+            try:
+                with open(files_to_patch[pkgfile], 'r') as fp:
+                    new_contents = fp.read()
+            except Exception as exc:
+                print(f"BD-Scan-Action: ERROR: Unable to open package file '{files_to_patch[pkgfile]}'"
+                      f" - {str(exc)}")
+                return False
+
+            gitCommitRef = GitCommitRef()
+            gitCommitRef.comment = "Added Synopsys pipeline template"
+            gitCommitRef.changes = [
+                {
+                    'changeType': 'edit',
+                    'item': {
+                        'path': pkgfile
+                    },
+                    'newContent': {
+                        'content': new_contents,
+                        'contentType': 'rawText'
+                    }
+                }
+            ]
+
+            gitPush.commits.append(gitCommitRef)
+
+            # globals.printdebug(f"DEBUG: Update file '{pkgfile}' with commit message '{commit_message}'")
+            # file = repo.update_file(pkgfile, commit_message, new_contents, orig_contents.sha, branch=new_branch_name)
+
+        push = self.azure_git_client.create_push(gitPush, self.azure_repo_id)
+
+        if not push:
+            print(f"BD-Scan-Action: ERROR: Create push failed")
+            sys.exit(1)
+
+        pr_title = f"Black Duck: Upgrade {comp.name} to version {comp.goodupgrade} fix known security vulerabilities"
+        pr_body = f"\n# Synopsys Black Duck Auto Pull Request\n" \
+                  f"Upgrade {comp.name} from version {comp.version} to " \
+                  f"{comp.goodupgrade} in order to fix security vulnerabilities:\n\n"
+
+        gitPullRequest = GitPullRequest()
+        gitPullRequest.source_ref_name = f"refs/heads/{new_branch_name}"
+        gitPullRequest.target_ref_name = self.azure_build_source_branch
+        gitPullRequest.title = pr_title
+        gitPullRequest.description = pr_body
+
+        pull = self.azure_git_client.create_pull_request(gitPullRequest, self.azure_repo_id)
+
+        if not pull:
+            print(f"BD-Scan-Action: ERROR: Create pull request failed")
+            sys.exit(1)
+
+        return True

+ 45 - 0
target/Azure/classAzureProvider_6.py

@@ -0,0 +1,45 @@
+def pr_comment(self, comment):
+        pr_threads = self.azure_git_client.get_threads(self.azure_repo_id, self.azure_pull_request_id)
+        existing_thread = None
+        existing_comment = None
+        for pr_thread in pr_threads:
+            for pr_thread_comment in pr_thread.comments:
+                if pr_thread_comment.content and globals.comment_on_pr_header in pr_thread_comment.content:
+                    existing_thread = pr_thread
+                    existing_comment = pr_thread_comment
+
+        comments_markdown = f"# {globals.comment_on_pr_header}\n{comment}"
+
+        if len(comments_markdown) > 65535:
+            comments_markdown = comments_markdown[:65535]
+
+        if existing_comment is not None:
+            globals.printdebug(f"DEBUG: Update/edit existing comment for PR #{self.azure_pull_request_id}\n"
+                               f"{comments_markdown}")
+
+            pr_thread_comment = Comment()
+            pr_thread_comment.parent_comment_id = 0
+            pr_thread_comment.content = comments_markdown
+            pr_thread_comment.comment_type = 1
+
+            retval = self.azure_git_client.update_comment(pr_thread_comment, self.azure_repo_id,
+                                                          self.azure_pull_request_id, existing_thread.id,
+                                                          existing_comment.id)
+
+            globals.printdebug(f"DEBUG: Updated thread, retval={retval}")
+        else:
+            globals.printdebug(f"DEBUG: Create new thread for PR #{self.azure_pull_request_id}")
+
+            pr_thread_comment = Comment()
+            pr_thread_comment.parent_comment_id = 0
+            pr_thread_comment.content = comments_markdown
+            pr_thread_comment.comment_type = 1
+
+            pr_thread = GitPullRequestCommentThread()
+            pr_thread.comments = [pr_thread_comment]
+            pr_thread.status = 1
+
+            retval = self.azure_git_client.create_thread(pr_thread, self.azure_repo_id, self.azure_pull_request_id)
+
+            globals.printdebug(f"DEBUG: Created thread, retval={retval}")
+        return True

+ 23 - 0
target/Azure/client_40.py

@@ -0,0 +1,23 @@
+def get_azure_secret(self, vault_name, key_name, version=None, **kwargs):
+        '''
+        @dev get secret from azure key vault
+        @param vault_name key vault name
+        @param key_name sercrt's key
+        @param version version of the secret to get. if unspecified, gets the latest version
+        @return secret(KeyVaultSecret)
+        '''
+        update_mempool = True
+        secret = None
+        key = self.create_memory_key_with_args(vault_name, key_name, version)
+        if self.use_mempool:
+            secret = self.get_memory_key_value(key)
+            if not secret:
+                secret = azure_get_secret(vault_name, key_name, version, **kwargs)
+            else:
+                update_mempool = False
+        else:
+            secret = azure_get_secret(vault_name, key_name, version, **kwargs)
+
+        if update_mempool:
+            self.set_memory_key_value(key, secret)
+        return secret

+ 23 - 0
target/Azure/client_41.py

@@ -0,0 +1,23 @@
+def get_azure_secret_value(self, vault_name, key_name, version=None, **kwargs):
+        '''
+        @dev get secret from azure key vault
+        @param vault_name name of key vault
+        @param key_name the name of secret
+        @param key_value the value of secret
+        @return value of secret(KeyVaultSecret)
+        '''
+        secret = None
+        update_mempool = True
+        key = self.create_memory_key_with_args(vault_name, key_name, version, "value")
+        if self.use_mempool:
+            secret = self.get_memory_key_value(key)
+            if not secret:
+                secret = azure_get_secret(vault_name, key_name, version, **kwargs).value
+            else:
+                update_mempool = False
+        else:
+            secret = azure_get_secret(vault_name, key_name, version, **kwargs).value
+
+        if update_mempool:
+            self.set_memory_key_value(key, secret)
+        return secret

+ 2 - 0
target/Azure/client_48.py

@@ -0,0 +1,2 @@
+def get_azure_secrets_keys(self, vault_name):
+        return azure_get_secrets_keys(vault_name)

+ 13 - 0
target/Azure/container_volume_5.py

@@ -0,0 +1,13 @@
+def get_file_volume(
+        self, mount_name: str, share_name: str, storage_account_name: str, read_only: bool = False
+    ) -> Volume:
+        """Get Azure File Volume"""
+        return Volume(
+            name=mount_name,
+            azure_file=AzureFileVolume(
+                share_name=share_name,
+                storage_account_name=storage_account_name,
+                read_only=read_only,
+                storage_account_key=self.get_storagekey(),
+            ),
+        )

+ 14 - 0
target/Azure/data_lake_4.py

@@ -0,0 +1,14 @@
+def get_conn(self) -> core.AzureDLFileSystem:
+        """Return a AzureDLFileSystem object."""
+        if not self._conn:
+            conn = self.get_connection(self.conn_id)
+            service_options = conn.extra_dejson
+            self.account_name = service_options.get('account_name') or service_options.get(
+                'extra__azure_data_lake__account_name'
+            )
+            tenant = service_options.get('tenant') or service_options.get('extra__azure_data_lake__tenant')
+
+            adl_creds = lib.auth(tenant_id=tenant, client_secret=conn.password, client_id=conn.login)
+            self._conn = core.AzureDLFileSystem(adl_creds, store_name=self.account_name)
+            self._conn.connect()
+        return self._conn

+ 13 - 0
target/Azure/django-blob_5.py

@@ -0,0 +1,13 @@
+def _open(self, name, mode='rb'):
+        """
+        Return the AzureStorageFile.
+        """
+
+        from django.core.files.base import ContentFile
+
+        contents = self._get_service().get_blob_to_bytes(
+            container_name=self.container,
+            blob_name=name
+        )
+
+        return ContentFile(contents)

+ 34 - 0
target/Azure/django-blob_6.py

@@ -0,0 +1,34 @@
+def _save(self, name, content):
+        """
+        Use the Azure Storage service to write ``content`` to a remote file
+        (called ``name``).
+        """
+        
+
+        content.open()
+
+        content_type = None
+
+        if hasattr(content.file, 'content_type'):
+            content_type = content.file.content_type
+        else:
+            content_type = mimetypes.guess_type(name)[0]
+
+        cache_control = self.get_cache_control(
+            self.container,
+            name,
+            content_type
+        )
+
+        self._get_service().put_block_blob_from_file(
+            container_name=self.container,
+            blob_name=name,
+            stream=content,
+            x_ms_blob_content_type=content_type,
+            cache_control=cache_control,
+            x_ms_blob_cache_control=cache_control
+        )
+
+        content.close()
+
+        return name

+ 22 - 0
target/Azure/django-blob_7.py

@@ -0,0 +1,22 @@
+def listdir(self, path):
+        """
+        Lists the contents of the specified path, returning a 2-tuple of lists;
+        the first item being directories, the second item being files.
+        """
+
+        files = []
+
+        if path and not path.endswith('/'):
+            path = '%s/' % path
+
+        path_len = len(path)
+
+        if not path:
+            path = None
+
+        blob_list = self._get_service().list_blobs(self.container, prefix=path)
+
+        for name in blob_list:
+            files.append(name[path_len:])
+
+        return ([], files)

+ 12 - 0
target/Azure/reproduce-14067_1.py

@@ -0,0 +1,12 @@
+def execute_code(loop, timeout=None):
+    with BytesIO() as file_stream:
+        service_principal = DefaultAzureCredential(exclude_cli_credential=True)
+        future = asyncio.run_coroutine_threadsafe(
+            download_blob_using_blobclient(account_name,service_principal, container_name, blob_to_read, file_stream),
+            loop=loop)
+        future.result(timeout)
+        file_stream.flush()
+        file_stream.seek(0)
+        bw=TextIOWrapper(file_stream).read()
+        print(bw)
+        return
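execute_code assumes the supplied loop is already running in another thread; otherwise run_coroutine_threadsafe would never be serviced. A minimal sketch of how a caller might set that up, using only standard asyncio/threading:

    import asyncio
    import threading

    loop = asyncio.new_event_loop()
    # Drive the loop from a daemon thread so run_coroutine_threadsafe has a running loop to target.
    threading.Thread(target=loop.run_forever, daemon=True).start()

    execute_code(loop, timeout=60)

    loop.call_soon_threadsafe(loop.stop)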

+ 48 - 0
target/Azure/submit_azureml_pytest_1.py

@@ -0,0 +1,48 @@
+def setup_workspace(workspace_name, subscription_id, resource_group, cli_auth,
+                    location):
+    """
+    This sets up an Azure Workspace.
+    An existing Azure Workspace is used or a new one is created if needed for
+    the pytest run.
+
+    Args:
+        workspace_name  (str): Centralized location on Azure to work
+                               with all the artifacts used by AzureML
+                               service
+        subscription_id (str): the Azure subscription id
+        resource_group  (str): Azure Resource Groups are logical collections of
+                         assets associated with a project. Resource groups
+                         make it easy to track or delete all resources
+                         associated with a project by tracking or deleting
+                         the Resource group.
+        cli_auth         Azure authentication
+        location        (str): workspace reference
+
+    Returns:
+        ws: workspace reference
+    """
+    logger.debug('setup: workspace_name is {}'.format(workspace_name))
+    logger.debug('setup: resource_group is {}'.format(resource_group))
+    logger.debug('setup: subid is {}'.format(subscription_id))
+    logger.debug('setup: location is {}'.format(location))
+
+    try:
+        # use existing workspace if there is one
+        ws = Workspace.get(
+            name=workspace_name,
+            subscription_id=subscription_id,
+            resource_group=resource_group,
+            auth=cli_auth
+        )
+    except WorkspaceException:
+        # this call might take a minute or two.
+        logger.debug('Creating new workspace')
+        ws = Workspace.create(
+            name=workspace_name,
+            subscription_id=subscription_id,
+            resource_group=resource_group,
+            # create_resource_group=True,
+            location=location,
+            auth=cli_auth
+        )
+    return ws

+ 40 - 0
target/Azure/submit_azureml_pytest_2.py

@@ -0,0 +1,40 @@
+def setup_persistent_compute_target(workspace, cluster_name, vm_size,
+                                    max_nodes):
+    """
+    Set up a persistent compute target on AzureML.
+    A persistent compute target runs noticeably faster than a
+    regular compute target for subsequent runs.  The benefit
+    is that AzureML manages turning the compute on/off as needed for
+    each job so the user does not need to do this.
+
+    Args:
+        workspace    (str): Centralized location on Azure to work with
+                            all the artifacts used by the AzureML service
+        cluster_name (str): the Azure cluster for this run. It can
+                            already exist or it will be created.
+        vm_size      (str): Azure VM size, like STANDARD_D3_V2
+        max_nodes    (int): Number of VMs, max_nodes=4 will
+                            autoscale up to 4 VMs
+    Returns:
+        cpu_cluster : cluster reference
+    """
+    # setting vmsize and num nodes creates a persistent AzureML
+    # compute resource
+
+    logger.debug("setup: cluster_name {}".format(cluster_name))
+    # https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-set-up-training-targets
+
+    try:
+        cpu_cluster = ComputeTarget(workspace=workspace, name=cluster_name)
+        logger.debug('setup: Found existing cluster, use it.')
+    except ComputeTargetException:
+        logger.debug('setup: create cluster')
+        compute_config = AmlCompute.provisioning_configuration(
+                       vm_size=vm_size,
+                       max_nodes=max_nodes)
+        cpu_cluster = ComputeTarget.create(workspace,
+                                           cluster_name,
+                                           compute_config)
+    cpu_cluster.wait_for_completion(show_output=True)
+    return cpu_cluster
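An illustrative follow-on call (not part of the diff), reusing the workspace from setup_workspace and the cluster defaults from the argument parser further below:

    cpu_cluster = setup_persistent_compute_target(workspace=ws,
                                                  cluster_name='amlcompute',
                                                  vm_size='STANDARD_D3_V2',
                                                  max_nodes=4)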

+ 35 - 0
target/Azure/submit_azureml_pytest_3.py

@@ -0,0 +1,35 @@
+def create_run_config(cpu_cluster, docker_proc_type, conda_env_file):
+    """
+    AzureML requires the run environment to be set up prior to submission.
+    This configures a Docker-based run on the persistent compute target.
+    Even though it is called persistent compute, AzureML handles
+    startup/shutdown of the compute environment.
+
+    Args:
+        cpu_cluster      (str) : cluster the test runs on. For unit
+                                 tests, typically one of:
+                                 - Reco_cpu_test
+                                 - Reco_gpu_test
+        docker_proc_type (str) : Docker base image selecting the
+                                 processor type, cpu or gpu
+        conda_env_file   (str) : filename which contains info to
+                                 set up conda env
+    Return:
+          run_amlcompute : AzureML run config
+    """
+
+    # runconfig with max_run_duration_seconds did not work, check why:
+    # run_amlcompute = RunConfiguration(max_run_duration_seconds=60*30)
+    run_amlcompute = RunConfiguration()
+    run_amlcompute.target = cpu_cluster
+    run_amlcompute.environment.docker.enabled = True
+    run_amlcompute.environment.docker.base_image = docker_proc_type
+
+    # Use the conda file passed in (conda_env_file) to create a conda
+    # environment in the Docker image for execution.
+    # False: AzureML builds the environment from the conda file below.
+    # True: the user manages the environment's dependencies manually.
+    run_amlcompute.environment.python.user_managed_dependencies = False
+    run_amlcompute.environment.python.conda_dependencies = CondaDependencies(
+            conda_dependencies_file_path=conda_env_file)
+    return run_amlcompute
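A hedged usage sketch (not part of the diff): DEFAULT_CPU_IMAGE from the AzureML SDK stands in here as one possible value for docker_proc_type, and ./reco.yaml is the conda-file default used by the argument parser further below:

    from azureml.core.runconfig import DEFAULT_CPU_IMAGE

    run_config = create_run_config(cpu_cluster=cpu_cluster,
                                   docker_proc_type=DEFAULT_CPU_IMAGE,
                                   conda_env_file='./reco.yaml')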

+ 16 - 0
target/Azure/submit_azureml_pytest_4.py

@@ -0,0 +1,16 @@
+def create_experiment(workspace, experiment_name):
+    """
+    AzureML requires an experiment as a container of trials.
+    This will either create a new experiment or use an
+    existing one.
+
+    Args:
+        workspace (Workspace) : AzureML workspace object
+        experiment_name (str) : AzureML experiment name
+    Return:
+        exp - AzureML experiment
+    """
+
+    logger.debug('create: experiment_name {}'.format(experiment_name))
+    exp = Experiment(workspace=workspace, name=experiment_name)
+    return exp
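For example (illustrative, not part of the diff), using the workspace object returned by setup_workspace and the experiment-name default from the argument parser:

    exp = create_experiment(ws, experiment_name='persistentAML')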

+ 48 - 0
target/Azure/submit_azureml_pytest_5.py

@@ -0,0 +1,48 @@
+def submit_experiment_to_azureml(test, test_folder, test_markers, junitxml,
+                                 run_config, experiment):
+
+    """
+    Submitting the experiment to AzureML actually runs the script.
+
+    Args:
+        test         (str) - pytest script, folder/test
+                             such as ./tests/ci/run_pytest.py
+        test_folder  (str) - folder where tests to run are stored,
+                             like ./tests/unit
+        test_markers (str) - test markers used by pytest
+                             "not notebooks and not spark and not gpu"
+        junitxml     (str) - file of output summary of tests run
+                             note "--junitxml" is required as part
+                             of the string
+                             Example: "--junitxml reports/test-unit.xml"
+        run_config - environment configuration
+        experiment - instance of an Experiment, a collection of
+                     trials where each trial is a run.
+    Return:
+          run : AzureML run or trial
+    """
+
+    logger.debug('submit: testfolder {}'.format(test_folder))
+    logger.debug('junitxml: {}'.format(junitxml))
+    project_folder = "."
+
+    script_run_config = ScriptRunConfig(source_directory=project_folder,
+                                        script=test,
+                                        run_config=run_config,
+                                        arguments=["--testfolder",
+                                                   test_folder,
+                                                   "--testmarkers",
+                                                   test_markers,
+                                                   "--xmlname",
+                                                   junitxml]
+                                        )
+    run = experiment.submit(script_run_config)
+    # block until the run (and its post processing) completes
+    run.wait_for_completion(show_output=True, wait_post_processing=True)
+
+    # test logs can also be found on Azure: in the Azure portal, open the
+    # AzureML workspace, find the experiment name and drill into the
+    # individual run
+    logger.debug('files {}'.format(run.get_file_names()))
+
+    return run
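An illustrative call (not part of the diff); run_config and experiment come from the helpers above, and the remaining values are the examples given in the docstring:

    run = submit_experiment_to_azureml(
        test='./tests/ci/run_pytest.py',
        test_folder='./tests/unit',
        test_markers='not notebooks and not spark and not gpu',
        junitxml='--junitxml reports/test-unit.xml',
        run_config=run_config,
        experiment=exp)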

+ 96 - 0
target/Azure/submit_azureml_pytest_6.py

@@ -0,0 +1,96 @@
+def create_arg_parser():
+    """
+    Most arguments have sensible defaults, so the parser can be run with
+    few or no flags; the user can override any of them as needed.
+    """
+
+    parser = argparse.ArgumentParser(description='Process some inputs')
+    # script to run pytest
+    parser.add_argument("--test",
+                        action="store",
+                        default="./tests/ci/run_pytest.py",
+                        help="location of script to run pytest")
+    # test folder
+    parser.add_argument("--testfolder",
+                        action="store",
+                        default="./tests/unit",
+                        help="folder where tests are stored")
+    # pytest test markers
+    parser.add_argument("--testmarkers",
+                        action="store",
+                        default="not notebooks and not spark and not gpu",
+                        help="pytest markers indicate tests to run")
+    # test summary file
+    parser.add_argument("--junitxml",
+                        action="store",
+                        default="reports/test-unit.xml",
+                        help="file for returned test results")
+    # max num nodes in Azure cluster
+    parser.add_argument("--maxnodes",
+                        action="store",
+                        default=4,
+                        help="specify the maximum number of nodes for the run")
+    # Azure resource group
+    parser.add_argument("--rg",
+                        action="store",
+                        default="recommender",
+                        help="Azure Resource Group")
+    # AzureML workspace Name
+    parser.add_argument("--wsname",
+                        action="store",
+                        default="RecoWS",
+                        help="AzureML workspace name")
+    # AzureML clustername
+    parser.add_argument("--clustername",
+                        action="store",
+                        default="amlcompute",
+                        help="Set name of Azure cluster")
+    # Azure VM size
+    parser.add_argument("--vmsize",
+                        action="store",
+                        default="STANDARD_D3_V2",
+                        help="Set the size of the VM either STANDARD_D3_V2")
+    # cpu or gpu
+    parser.add_argument("--dockerproc",
+                        action="store",
+                        default="cpu",
+                        help="Base image used in docker container")
+    # Azure subscription id, when used in a pipeline, it is stored in keyvault
+    parser.add_argument("--subid",
+                        action="store",
+                        default="123456",
+                        help="Azure Subscription ID")
+    # ./reco.yaml is created in the azure devops pipeline.
+    # Not recommended to change this.
+    parser.add_argument("--condafile",
+                        action="store",
+                        default="./reco.yaml",
+                        help="file with environment variables")
+    # AzureML experiment name
+    parser.add_argument("--expname",
+                        action="store",
+                        default="persistentAML",
+                        help="experiment name on Azure")
+    # Azure datacenter location
+    parser.add_argument("--location",
+                        default="EastUS",
+                        help="Azure location")
+    # github repo, stored in AzureML experiment for info purposes
+    parser.add_argument("--reponame",
+                        action="store",
+                        default="--reponame MyGithubRepo",
+                        help="GitHub repo being tested")
+    # github branch, stored in AzureML experiment for info purposes
+    parser.add_argument("--branch",
+                        action="store",
+                        default="--branch MyGithubBranch",
+                        help=" Identify the branch test test is run on")
+    # github pull request, stored in AzureML experiment for info purposes
+    parser.add_argument("--pr",
+                        action="store",
+                        default="--pr PRTestRun",
+                        help="If a pr triggered the test, list it here")
+
+    args = parser.parse_args()
+
+    return args
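One caveat worth illustrating (not part of the diff): none of these arguments set type=, so a value supplied on the command line arrives as a string even when the default is an int, and callers should convert explicitly:

    args = create_arg_parser()
    max_nodes = int(args.maxnodes)  # default is the int 4, but a CLI value
                                    # such as --maxnodes 8 arrives as the str '8'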

+ 61 - 0
target/Azure/table-service_2.py

@@ -0,0 +1,61 @@
+def _process_one_package(package_name, version):
+    logger.info("Worker: Package:{} Version:{}".format(package_name, version))
+    if not package_name or not version:
+        logger.warning("package_name or version was empty; moving on as the queue had bad data")
+        return
+
+    # .6684 seconds to run.  74577 total packages
+    package_info = PyPIPackageInformation.get_package_specific_version_info(package_name, version)
+    if not package_info:
+        logger.error("Worker: Package:{} Version:{} failed to get package info".format(package_name, version))
+        return
+
+    supports_python_2 = len([x for x in package_info['classifiers'] if x.startswith('Programming Language :: Python :: 2')]) > 0
+    supports_python_3 = len([x for x in package_info['classifiers'] if x.startswith('Programming Language :: Python :: 3')]) > 0
+    uploaded = package_info['uploaded']
+
+    try:
+        summary_entity = table_service.get_entity(config.PACKAGE_SUMMARY_TABLENAME, package_name, STATIC_ROW_KEY)
+    except Exception:
+        # we don't yet have a summary entity for this package, so create one
+        summary_entity = { 
+            'PartitionKey':package_name, 'RowKey':STATIC_ROW_KEY, 'First_Published':None, 
+            'Python2_Start':None, 'Python2_End':None, 'Python3_Start':None
+            }
+        table_service.insert_or_replace_entity(config.PACKAGE_SUMMARY_TABLENAME, package_name, STATIC_ROW_KEY, summary_entity)
+        summary_entity = table_service.get_entity(config.PACKAGE_SUMMARY_TABLENAME, package_name, STATIC_ROW_KEY)
+
+    # set fields using the upload date. uploaded is None if the version has
+    # never been uploaded; this filters out packages that never had content.
+    if uploaded is not None:
+        if not hasattr(summary_entity, 'First_Published') or summary_entity.First_Published is None or summary_entity.First_Published > uploaded:
+            # if the recorded first-published date is empty or later than
+            # this upload, move it back to this upload
+            summary_entity.First_Published = uploaded
+
+        if supports_python_2 and \
+            (not hasattr(summary_entity, 'Python2_Start') or summary_entity.Python2_Start is None or summary_entity.Python2_Start > uploaded):
+            # this release supports python 2 and the recorded python 2 start
+            # date is empty or later than this upload, so move it back
+            summary_entity.Python2_Start = uploaded
+    
+        if supports_python_2 and hasattr(summary_entity, 'Python2_End') and summary_entity.Python2_End is not None and summary_entity.Python2_End < uploaded:
+            # this release supports python 2 but was uploaded after the date we
+            # thought python 2 support ended, so support must not have really ended
+            summary_entity.Python2_End = None    
+        elif hasattr(summary_entity, 'Python2_Start') and hasattr(summary_entity, 'Python2_End') and \
+            summary_entity.Python2_Start is not None and summary_entity.Python2_End is not None and \
+            (summary_entity.Python2_End > uploaded and summary_entity.Python2_Start < uploaded):
+            # this upload falls between the recorded python 2 start and end
+            # dates; since the branch above did not apply, treat this upload
+            # as the (earlier) end of python 2 support
+            summary_entity.Python2_End = uploaded
+
+        if supports_python_3 and \
+            (not hasattr(summary_entity, 'Python3_Start') or summary_entity.Python3_Start is None or summary_entity.Python3_Start > uploaded):
+            # the recorded python 3 start date is empty or later than this
+            # upload, so move it back to this upload
+            summary_entity.Python3_Start = uploaded
+
+    version_entity = _insert_entity_to_package_version_table(package_name, version, supports_python_2, supports_python_3, package_info['downloads'], uploaded)
+    summary_entity = table_service.insert_or_replace_entity(config.PACKAGE_SUMMARY_TABLENAME, package_name, STATIC_ROW_KEY, summary_entity)
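As a sketch of how the classifier check above behaves, here it is applied to a typical pair of PyPI trove classifiers (values are illustrative):

    classifiers = ['Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3.6']
    supports_python_2 = len([x for x in classifiers
                             if x.startswith('Programming Language :: Python :: 2')]) > 0  # True
    supports_python_3 = len([x for x in classifiers
                             if x.startswith('Programming Language :: Python :: 3')]) > 0  # True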

+ 19 - 0
target/Azure/table-service_3.py

@@ -0,0 +1,19 @@
+def _insert_entity_to_package_version_table(package, version, python2, python3, downloads, upload_time):
+    # TODO: issue with python azure storage.  Version can't have '~' in it. https://github.com/Azure/azure-storage-python/issues/76
+    package_sanitized = urllib.parse.quote_plus(package)
+    version_sanitized = urllib.parse.quote_plus(version)
+
+    try:
+        result =  table_service.insert_or_replace_entity(config.PACKAGE_VERSION_DATA_TABLENAME, package_sanitized, version_sanitized,
+                                    {'PartitionKey' : package_sanitized,
+                                     'RowKey': version_sanitized, 
+                                     'Python2': python2, 
+                                     'Python3': python3,
+                                     'Downloads': downloads,
+                                     'UploadTime': upload_time})
+
+        return result
+    except Exception as e:
+        logger.error("Failed to insert Package:{} Version:{} Python2:{} Python3:{} Downloads:{} UploadTime:{} Exception:{}".format(
+            package, version, python2, python3, downloads, upload_time, traceback.format_exc()))
+        raise e

+ 24 - 0
target/Azure/table-storage_2.py

@@ -0,0 +1,24 @@
+def run_all_samples(self, account):
+        table_service = account.create_table_service()
+        print('Azure Storage Advanced Table samples - Starting.')
+        
+        print('\n\n* List tables *\n')
+        self.list_tables(table_service)
+        
+        if not account.is_azure_cosmosdb_table():
+            print('\n\n* Set service properties *\n')
+            self.set_service_properties(table_service)
+
+            print('\n\n* Set Cors rules *\n')
+            self.set_cors_rules(table_service)
+
+            print('\n\n* ACL operations *\n')
+            self.table_acl_operations(table_service)
+
+        if config.IS_EMULATED:
+            print('\n\n* Shared Access Signature is not supported in emulator *\n')
+        else:
+            print('\n\n* SAS operations *\n')
+            self.table_operations_with_sas(account)
+
+        print('\nAzure Storage Advanced Table samples - Completed.\n')

+ 14 - 0
target/Azure/test_adx_2.py

@@ -0,0 +1,14 @@
+def test_conn_missing_method(self):
+        db.merge_conn(
+            Connection(
+                conn_id=ADX_TEST_CONN_ID,
+                conn_type='azure_data_explorer',
+                login='client_id',
+                password='client secret',
+                host='https://help.kusto.windows.net',
+                extra=json.dumps({}),
+            )
+        )
+        with pytest.raises(AirflowException) as ctx:
+            AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
+        assert 'is missing: `extra__azure_data_explorer__auth_method`' in str(ctx.value)

+ 14 - 0
target/Azure/test_adx_3.py

@@ -0,0 +1,14 @@
+def test_conn_unknown_method(self):
+        db.merge_conn(
+            Connection(
+                conn_id=ADX_TEST_CONN_ID,
+                conn_type='azure_data_explorer',
+                login='client_id',
+                password='client secret',
+                host='https://help.kusto.windows.net',
+                extra=json.dumps({'extra__azure_data_explorer__auth_method': 'AAD_OTHER'}),
+            )
+        )
+        with pytest.raises(AirflowException) as ctx:
+            AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
+        assert 'Unknown authentication method: AAD_OTHER' in str(ctx.value)

+ 13 - 0
target/Azure/test_adx_4.py

@@ -0,0 +1,13 @@
+def test_conn_missing_cluster(self):
+        db.merge_conn(
+            Connection(
+                conn_id=ADX_TEST_CONN_ID,
+                conn_type='azure_data_explorer',
+                login='client_id',
+                password='client secret',
+                extra=json.dumps({}),
+            )
+        )
+        with pytest.raises(AirflowException) as ctx:
+            AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
+        assert 'Host connection option is required' in str(ctx.value)

+ 23 - 0
target/Azure/test_adx_5.py

@@ -0,0 +1,23 @@
+def test_conn_method_aad_creds(self, mock_init):
+        mock_init.return_value = None
+        db.merge_conn(
+            Connection(
+                conn_id=ADX_TEST_CONN_ID,
+                conn_type='azure_data_explorer',
+                login='client_id',
+                password='client secret',
+                host='https://help.kusto.windows.net',
+                extra=json.dumps(
+                    {
+                        'extra__azure_data_explorer__tenant': 'tenant',
+                        'extra__azure_data_explorer__auth_method': 'AAD_CREDS',
+                    }
+                ),
+            )
+        )
+        AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
+        mock_init.assert_called_with(
+            KustoConnectionStringBuilder.with_aad_user_password_authentication(
+                'https://help.kusto.windows.net', 'client_id', 'client secret', 'tenant'
+            )
+        )

Some files were not shown because too many files changed in this diff