Account transfer from Regular to Key Pair
Is it possible to mass transfer Datasets from a regular Snowflake Connection account to a Key Pair connector account?
Best Answer
In Domo, there isn't a built-in feature to mass transfer datasets directly from one connector account to another, so you would need to build your own process. One option is to automate it through Domo's API; I would attempt it in Python.
Authenticate
import requests

# Domo API credentials (a client ID and secret created in the Domo developer portal)
client_id = 'YOUR_CLIENT_ID'
client_secret = 'YOUR_CLIENT_SECRET'
auth_url = 'https://api.domo.com/oauth/token?grant_type=client_credentials&scope=data'

# Get an access token; Domo's token endpoint accepts HTTP basic auth
response = requests.get(auth_url, auth=(client_id, client_secret))
access_token = response.json().get('access_token')
headers = {
    'Authorization': f'Bearer {access_token}',
    'Content-Type': 'application/json'
}
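Before moving on, it's worth confirming that authentication actually succeeded, since a missing token turns every later call into a confusing 401; a small guard using requests' built-in status check:

# Fail fast if the token request did not succeed
response.raise_for_status()
if not access_token:
    raise RuntimeError(f'No access token returned: {response.text}')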
List Datasets that use the old connector

# Get the list of datasets
datasets_url = 'https://api.domo.com/v1/datasets'
params = {'limit': 50}  # adjust the limit as needed
datasets = requests.get(datasets_url, headers=headers, params=params).json()
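Note that this endpoint is paginated (the limit appears to cap at 50 per request), so on an instance with many datasets a single call will miss some. A sketch of paging through with the offset parameter:

# Page through all datasets using limit/offset
all_datasets = []
offset = 0
while True:
    page = requests.get(datasets_url, headers=headers,
                        params={'limit': 50, 'offset': offset}).json()
    if not page:
        break
    all_datasets.extend(page)
    offset += len(page)
datasets = all_datasets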
# Filter datasets by the old Snowflake connection (you may need to add additional filtering logic)
old_connection_id = 'OLD_CONNECTION_ID'
datasets_to_migrate = [ds for ds in datasets if ds['dataSource']['id'] == old_connection_id]

Recreate datasets with new connector
new_connection_id = 'NEW_KEY_PAIR_CONNECTION_ID'
new_dataset_url = 'https://api.domo.com/v1/datasets'

for dataset in datasets_to_migrate:
    new_dataset_payload = {
        'name': dataset['name'],
        'dataSource': {
            'id': new_connection_id
        },
        'schema': dataset['schema'],
        'sql': dataset['sql']  # adjust based on how the query is stored
    }
    response = requests.post(new_dataset_url, headers=headers, json=new_dataset_payload)
    if response.status_code == 201:
        print(f"Dataset {dataset['name']} recreated successfully.")
    else:
        print(f"Failed to create dataset {dataset['name']}: {response.text}")
Replace references in dataflows and dashboards

# Example to replace dataset references (conceptual)
def update_references(old_dataset_id, new_dataset_id):
    # Get all dataflows using the old dataset
    dataflows_url = f'https://api.domo.com/v1/dataflows?includeDataSourceId={old_dataset_id}'
    dataflows = requests.get(dataflows_url, headers=headers).json()
    # Update each dataflow to use the new dataset
    for dataflow in dataflows:
        for input_ds in dataflow['inputs']:
            if input_ds['id'] == old_dataset_id:
                input_ds['id'] = new_dataset_id

        # Send the updated dataflow back to Domo
        update_url = f'https://api.domo.com/v1/dataflows/{dataflow["id"]}'
        response = requests.put(update_url, headers=headers, json=dataflow)
        if response.status_code == 200:
            print(f"Dataflow {dataflow['name']} updated successfully.")
        else:
            print(f"Failed to update dataflow {dataflow['name']}: {response.text}")
Answers
@ArborRose Thanks for the idea! I will give it a try.