-
Notifications
You must be signed in to change notification settings - Fork 1
Tools - add common tools for LTP administration #62
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits
Commits
Show all changes
4 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,305 @@ | ||
# Copyright (c) Microsoft Corporation. | ||
# Licensed under the MIT License. | ||
|
||
# This tool is used to create blob storage PV/PVC and assign to group. | ||
# Usage: python3 addBlobStorage.py | ||
|
||
import yaml | ||
import subprocess | ||
import requests | ||
import os | ||
import json | ||
import sys | ||
|
||
|
||
def create_pv_yaml(storage_name, storage_account, resource_group, identity, output_file):
    """Write a PersistentVolume manifest for a blobfuse block-cache mount.

    The PV mounts the given blob container through the Azure Blob CSI
    driver, authenticating via the supplied managed-identity client ID.

    :param storage_name: blob container name (dashes are stripped when
        deriving the PV name and cache path)
    :param storage_account: Azure storage account name
    :param resource_group: resource group that owns the storage account
    :param identity: client ID (GUID) of the managed identity (MSI auth)
    :param output_file: path the YAML manifest is written to
    :return: the generated PV name, 'blob-<container>-<account>'
    """
    sanitized = storage_name.replace("-", "")
    pv_name = f'blob-{sanitized}-{storage_account}'

    # blobfuse2 block-cache mode: large on-disk read cache with prefetch.
    mount_options = [
        '--allow-other',
        '--attr-timeout=3600',
        '--entry-timeout=3600',
        '--attr-cache-timeout=7200',
        '--block-cache',
        '--block-cache-pool-size=81920',
        '--block-cache-block-size=1',
        f'--block-cache-path=/mnt/blobfusecache-{sanitized}-{storage_account}',
        '--block-cache-disk-size=1572864',
        '--block-cache-prefetch=12',
        '--block-cache-prefetch-on-open=false',
        '--cleanup-on-start=true',
    ]

    manifest = {
        'apiVersion': 'v1',
        'kind': 'PersistentVolume',
        'metadata': {
            'name': pv_name,
        },
        'spec': {
            'accessModes': ['ReadWriteMany'],
            'capacity': {
                'storage': '1Pi',
            },
            'csi': {
                'driver': 'blob.csi.azure.com',
                'volumeAttributes': {
                    'AzureStorageAuthType': 'MSI',
                    'AzureStorageIdentityClientID': identity,
                    'containerName': storage_name,
                    'protocol': 'fuse',
                    'resourceGroup': resource_group,
                    'storageAccount': storage_account,
                },
                'volumeHandle': pv_name,
            },
            'mountOptions': mount_options,
            'persistentVolumeReclaimPolicy': 'Retain',
            'storageClassName': 'azureblob-fuse-premium',
            'volumeMode': 'Filesystem',
        },
    }

    with open(output_file, 'w') as handle:
        yaml.dump(manifest, handle)

    return pv_name
|
||
def create_pv_out_yaml(storage_name, storage_account, resource_group, identity, output_file):
    """Write a PersistentVolume manifest for a blobfuse file-cache (write) mount.

    Companion to create_pv_yaml: same container, but tuned for output
    (file cache + lazy write) instead of the block-cache read path, and
    the PV name carries an '-out' suffix.

    :param storage_name: blob container name (dashes are stripped when
        deriving the PV name and cache path)
    :param storage_account: Azure storage account name
    :param resource_group: resource group that owns the storage account
    :param identity: client ID (GUID) of the managed identity (MSI auth)
    :param output_file: path the YAML manifest is written to
    :return: the generated PV name, 'blob-<container>-<account>-out'
    """
    container_name = storage_name.replace("-", "")
    blob_name = f'blob-{container_name}-{storage_account}-out'

    # Consistency fix: the local was misnamed 'pvc_template' although this
    # function builds a PersistentVolume (cf. sibling create_pv_yaml).
    pv_template = {
        'apiVersion': 'v1',
        'kind': 'PersistentVolume',
        'metadata': {
            'name': blob_name,
        },
        'spec': {
            'accessModes': ['ReadWriteMany'],
            'capacity': {
                'storage': '1Pi'
            },
            'csi': {
                'driver': 'blob.csi.azure.com',
                'volumeAttributes': {
                    'AzureStorageAuthType': 'MSI',
                    'AzureStorageIdentityClientID': identity,
                    'containerName': storage_name,
                    'protocol': 'fuse',
                    'resourceGroup': resource_group,
                    'storageAccount': storage_account
                },
                'volumeHandle': blob_name
            },
            # blobfuse2 file-cache mode with lazy write for output workloads.
            'mountOptions': [
                '--allow-other',
                '--attr-timeout=600',
                '--entry-timeout=600',
                '--attr-cache-timeout=600',
                '--file-cache-timeout=3600',
                f'--tmp-path=/mnt/blobfusecache-{container_name}-{storage_account}-out',
                '--cache-size-mb=512000',
                '--lazy-write',
                '--cleanup-on-start=true'
            ],
            'persistentVolumeReclaimPolicy': 'Retain',
            'storageClassName': 'azureblob-fuse-premium',
            'volumeMode': 'Filesystem'
        }
    }

    with open(output_file, 'w') as file:
        yaml.dump(pv_template, file)

    return blob_name
|
||
def apply_yaml_to_aks(yaml_file):
    """Apply a manifest to the current cluster with ``kubectl apply -f``.

    :param yaml_file: path to the YAML manifest to apply
    :return: True on success, False on failure. (Previously the function
        always returned None and swallowed errors, so the callers' own
        try/except blocks could never observe a kubectl failure; callers
        that ignore the return value are unaffected by this change.)
    """
    try:
        subprocess.run(["kubectl", "apply", "-f", yaml_file], check=True)
        print(f"Successfully applied {yaml_file} to AKS.")
        return True
    except subprocess.CalledProcessError as e:
        # kubectl ran but exited non-zero (bad manifest, no cluster, ...).
        print(f"Failed to apply {yaml_file} to AKS: {e}")
    except FileNotFoundError as e:
        # Bug fix: a missing kubectl binary previously raised an uncaught
        # FileNotFoundError and aborted the whole script.
        print(f"Failed to apply {yaml_file} to AKS: {e}")
    return False
|
||
def create_pvc_yaml(blob_name, output_file):
    """Write a PersistentVolumeClaim manifest bound to an existing PV.

    The claim lives in the 'default' namespace, reuses the PV name as the
    claim name, and binds to that PV explicitly via 'volumeName'.

    :param blob_name: name of the PV to claim (also used as the PVC name)
    :param output_file: path the YAML manifest is written to
    """
    claim = {
        'apiVersion': 'v1',
        'kind': 'PersistentVolumeClaim',
        'metadata': {
            'name': blob_name,
            'namespace': 'default',
        },
        'spec': {
            'accessModes': ['ReadWriteMany'],
            'resources': {
                'requests': {
                    'storage': '1Pi',
                },
            },
            'volumeName': blob_name,
            'storageClassName': 'azureblob-fuse-premium',
            'volumeMode': 'Filesystem',
        },
    }

    with open(output_file, 'w') as handle:
        yaml.dump(claim, handle)
|
||
def get_group_info(url, token):
    """Fetch a group object from the rest-server.

    :param url: full per-group endpoint URL
    :param token: bearer token used for authentication
    :return: decoded JSON payload, or None if the request failed
    """
    auth_header = {'Authorization': f'Bearer {token}'}
    try:
        resp = requests.get(url, headers=auth_header)
        resp.raise_for_status()  # surface HTTP 4xx/5xx as exceptions
        payload = resp.json()
    except requests.exceptions.RequestException as e:
        print(f"An error occurred: {e}")
        return None
    return payload
|
||
def update_group_info(url, token, data):
    """PUT an updated group object to the rest-server.

    :param url: group endpoint URL
    :param token: bearer token used for authentication
    :param data: group object to store; sent wrapped as a patch request
    :return: decoded JSON payload, or None if the request failed
    """
    request_headers = {
        'Authorization': f'Bearer {token}',
        'Content-Type': 'application/json',
    }
    body = {
        'patch': True,
        'data': data,
    }
    try:
        resp = requests.put(url, headers=request_headers, json=body)
        resp.raise_for_status()  # surface HTTP 4xx/5xx as exceptions
        payload = resp.json()
    except requests.exceptions.RequestException as e:
        print(f"An error occurred: {e}")
        return None
    return payload
|
||
def refresh_group_info(url, token):
    """Trigger a storage-list refresh on the rest-server.

    POSTs to the refresh endpoint so the rest-server picks up PV/PVC
    changes made directly in Kubernetes.

    :param url: refresh endpoint URL
    :param token: bearer token used for authentication
    :return: decoded JSON payload, or None if the request failed.
        (Consistency fix: previously this helper — unlike its siblings
        get_group_info/update_group_info — had no error handling, so a
        connection failure crashed the script at its final step.)
    """
    headers = {
        'Authorization': f'Bearer {token}',
        'Content-Type': 'application/json'
    }
    try:
        response = requests.post(url, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"An error occurred: {e}")
        return None
|
||
|
||
# Interactive entry point: gathers cluster/storage parameters, generates and
# applies the PV/PVC pairs (a read-optimized 'input' mount and a
# write-optimized 'output' mount), then grants the new storage to the
# given OpenPAI group(s) and refreshes the rest-server storage cache.
if __name__ == "__main__":
    cluster_url = input("Enter the cluster URL (e.g. example.openpai.org): ")
    if not cluster_url:
        print("Cluster URL cannot be empty.")
        exit(1)

    identity = input("Enter the identity GUID: ")

    storage_account = input("Enter the storage account name: ")
    storage_name = input("Enter the container name: ")
    resource_group = input("Enter the resource group name: ")
    bearer_token = input("Enter the bearer token: ")
    groupname = input("Enter the group name: ")  # split by comma if multiple groups
    groupname = groupname.split(",")

    # Generate the input (block-cache, read-optimized) PV manifest.
    try:
        in_blob_name = create_pv_yaml(storage_name, storage_account, resource_group, identity, "pv_input.yaml")
    except Exception as e:
        print(f"Failed to create input PV YAML: {e}")
        exit(1)

    print(f"Input blob: {in_blob_name} created successfully.")

    # Generate the output (file-cache, lazy-write) PV manifest.
    try:
        out_blob_name = create_pv_out_yaml(storage_name, storage_account, resource_group, identity, "pv_output.yaml")
    except Exception as e:
        print(f"Failed to create output PV YAML: {e}")
        exit(1)

    print(f"Output blob: {out_blob_name} created successfully.")

    if os.path.exists("pv_input.yaml"):
        apply_yaml_to_aks("pv_input.yaml")
    else:
        print("pv_input.yaml does not exist. Skipping.")

    # NOTE(review): this progress message is printed AFTER the apply above
    # has already run (same for the three similar messages below).
    print(f"Applying PV {in_blob_name} to AKS...")

    if os.path.exists("pv_output.yaml"):
        apply_yaml_to_aks("pv_output.yaml")
    else:
        print("pv_output.yaml does not exist. Skipping.")

    print(f"Applying PV {out_blob_name} to AKS...")

    # Remove the generated PV manifests now that they have been applied.
    os.remove("pv_input.yaml")
    os.remove("pv_output.yaml")

    print("PV YAML files removed.")

    # Generate and apply the matching PVCs (one per PV, same names).
    try:
        create_pvc_yaml(in_blob_name, "pvc_input.yaml")
    except Exception as e:
        print(f"Failed to create input PVC YAML: {e}")
        exit(1)

    print(f"Input PVC blob: {in_blob_name} created successfully.")

    try:
        create_pvc_yaml(out_blob_name, "pvc_output.yaml")
    except Exception as e:
        print(f"Failed to create output PVC YAML: {e}")
        exit(1)

    print(f"Output PVC blob: {out_blob_name} created successfully.")

    # NOTE(review): apply_yaml_to_aks catches CalledProcessError itself and
    # only prints, so these try/except blocks will not observe a kubectl
    # failure — the script continues even if the apply failed.
    try:
        apply_yaml_to_aks("pvc_input.yaml")
    except Exception as e:
        print(f"Failed to apply pvc_input.yaml to AKS: {e}")
        exit(1)

    print(f"Applying PVC {in_blob_name} to AKS...")

    try:
        apply_yaml_to_aks("pvc_output.yaml")
    except Exception as e:
        print(f"Failed to apply pvc_output.yaml to AKS: {e}")
        exit(1)

    print(f"Applying PVC {out_blob_name} to AKS...")

    os.remove("pvc_input.yaml")
    os.remove("pvc_output.yaml")

    print("PVC YAML files removed.")

    print("Updating group info...")
    # Both storage names must be listed in each group's storageConfigs ACL
    # for group members to see the new mounts.
    items = [in_blob_name, out_blob_name]

    paiurl = f"https://{cluster_url}/rest-server/api/v2/group/"

    for group in groupname:
        url = f"{paiurl}{group}"
        data = get_group_info(url, bearer_token)

        if data:
            # Append the new storage names, skipping any already present.
            if 'storageConfigs' not in data['extension']['acls']:
                data['extension']['acls']['storageConfigs'] = []
            for item in items:
                if item not in data['extension']['acls']['storageConfigs']:
                    data['extension']['acls']['storageConfigs'].append(item)
        else:
            print(f"Failed to get group info for {group}.")
            continue

        newdata = data

        # NOTE(review): the PUT targets the collection URL (paiurl), not the
        # per-group URL used for the GET — presumably the rest-server takes
        # the group name from the payload; confirm against the group API.
        updated_data = update_group_info(paiurl, bearer_token, newdata)
        if updated_data:
            print(f"Group {group} info updated successfully.")
        else:
            print(f"Failed to update group info for {group}.")

    # Refresh the rest-server storage cache so the new PV/PVCs are visible.
    url = f"https://{cluster_url}/rest-server/api/v2/storages/refresh"
    response = refresh_group_info(url, bearer_token)

    print(response)
    print("Storage list refreshed.")
    print("Setup completed successfully.")
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
# Copyright (c) Microsoft Corporation. | ||
# Licensed under the MIT License. | ||
|
||
# This tool is used to refresh the PV/PVC cache in rest-server. | ||
# If we have changed the PV/PVC directly in Kubernetes, we can use this tool to make the change effective in OpenPAI. | ||
|
||
# Usage: python3 cleanStorageList.py | ||
|
||
import requests | ||
|
||
def post_request_with_token(url, token):
    """POST to *url* with a bearer token and return the decoded JSON reply.

    :param url: endpoint to call
    :param token: bearer token placed in the Authorization header
    :return: decoded JSON payload from the server
    """
    auth_headers = {
        'Authorization': f'Bearer {token}',
        'Content-Type': 'application/json',
    }
    return requests.post(url, headers=auth_headers).json()
|
||
def main():
    """Prompt for cluster URL and token, then refresh the storage list."""
    cluster_url = input("Enter the cluster URL (e.g. example.openpai.org): ")
    if not cluster_url:
        print("Cluster URL cannot be empty.")
        exit(1)

    token = input("Enter the bearer token: ")
    print("***********************************")

    # Hitting this endpoint makes the rest-server re-read PV/PVCs from
    # Kubernetes; the server's JSON reply is echoed for the operator.
    refresh_url = f"https://{cluster_url}/rest-server/api/v2/storages/refresh"
    print(post_request_with_token(refresh_url, token))


if __name__ == "__main__":
    main()
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
# Copyright (c) Microsoft Corporation. | ||
# Licensed under the MIT License. | ||
|
||
# This tool is used to delete a group in the cluster, which also removes the group from all users' group list. | ||
hippogr marked this conversation as resolved.
Show resolved
Hide resolved
|
||
# When new PAI configuration is updated and some of the groups have been changed such as its name and related email, | ||
# the new group will be created but the old group will still exist in the cluster, | ||
# so we can use this tool to delete the old group. | ||
|
||
# Usage: python3 deleteGroup.py <group_name> | ||
|
||
import requests | ||
import sys | ||
|
||
def delete_group(bearer_token, url):
    """Send a DELETE request to the REST server to delete a group.

    :param bearer_token: Bearer token for authentication
    :param url: URL of the group to delete
    :return: the raw Response object from the server (caller inspects
        status_code / text)
    """
    auth_headers = {
        "Authorization": f"Bearer {bearer_token}",
        "Content-Type": "application/json",
    }
    return requests.delete(url, headers=auth_headers)
|
||
# Entry point: the group to delete is taken from argv; the cluster URL and
# bearer token are prompted for interactively.
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python deleteGroup.py <group_name>")
        sys.exit(1)

    group_name = sys.argv[1]

    cluster_url = input("Enter the cluster URL (e.g. example.openpai.org): ")
    if not cluster_url:
        print("Cluster URL cannot be empty.")
        exit(1)

    token = input("Enter the bearer token: ")
    print("***********************************")

    # Base URL of the rest-server group collection.
    paiurl = f"https://{cluster_url}/rest-server/api/v2/group/"

    url = f"{paiurl}{group_name}"  # Replace with the actual URL
    response = delete_group(token, url)
    if response.status_code == 200:
        print("Group deleted successfully.")
    else:
        print(f"Failed to delete group. Status code: {response.status_code}, Response: {response.text}")
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.