Skip to content

Commit

Permalink
Closes-Bug: #1588999 - Upgrading New params (3.0->3.0.2) using tool
Browse files Browse the repository at this point in the history
Partial-Bug: #1590888 - Lbaas parameter typo

The offline DB convert tool now provides a way to upgrade existing new-format parameters in the Server Manager DB when those parameters have been renamed.

Change-Id: I27399637342a42d7067f8de6c7dc24b4de8cf5cf
  • Loading branch information
nitishkrishna committed Jun 9, 2016
1 parent c58bf49 commit 4cac615
Show file tree
Hide file tree
Showing 3 changed files with 248 additions and 29 deletions.
12 changes: 9 additions & 3 deletions src/client/parameter-translation-dict.json
Expand Up @@ -11,9 +11,9 @@
"newformat": "string",
"oldformat": "string"
},
"enable_lbass": {
"newname": "contrail.enable_lbass",
"oldname": "enable_lbass",
"enable_lbaas": {
"newname": "contrail.enable_lbaas",
"oldname": "enable_lbaas",
"newformat": "boolean",
"oldformat": "string"
},
Expand Down Expand Up @@ -353,6 +353,12 @@
"newformat": "string",
"oldformat": "string"
},
"mysql_service_password": {
"newname": "openstack.mysql.service_password",
"oldname": "mysql_service_password",
"newformat": "string",
"oldformat": "string"
},
"metadata_secret": {
"newname": "openstack.metadata_secret",
"oldname": "metadata_secret",
Expand Down
126 changes: 126 additions & 0 deletions src/client/upgrade-parameter-translation-dict.json
@@ -0,0 +1,126 @@
{
"haproxy_enable": {
"newname": "contrail.ha.haproxy_enable",
"oldname": "contrail.ha.haproxy_flag"
},
"database_dir": {
"newname": "contrail.database.directory",
"oldname": "contrail.database.database_dir"
},
"minimum_diskGB": {
"newname": "contrail.database.minimum_diskGB",
"oldname": "contrail.database.database_minimum_diskGB"
},
"database_ip_port": {
"newname": "contrail.database.ip_port",
"oldname": "contrail.database.database_ip_port"
},
"database_ttl": {
"newname": "contrail.analytics.data_ttl",
"oldname": "contrail.analytics.analytics_data_ttl"
},
"analytics_config_audit_ttl": {
"newname": "contrail.analytics.config_audit_ttl",
"oldname": "contrail.analytics.analytics_config_audit_ttl"
},
"analytics_statistics_ttl": {
"newname": "contrail.analytics.statistics_ttl",
"oldname": "contrail.analytics.analytics_statistics_ttl"
},
"analytics_flow_ttl": {
"newname": "contrail.analytics.flow_ttl",
"oldname": "contrail.analytics.analytics_flow_ttl"
},
"analytics_syslog_port": {
"newname": "contrail.analytics.syslog_port",
"oldname": "contrail.analytics.analytics_syslog_port"
},
"analytics_data_dir": {
"newname": "contrail.analytics.data_directory",
"oldname": "contrail.analytics.analytics_data_dir"
},
"ssd_data_dir": {
"newname": "contrail.analytics.ssd_data_directory",
"oldname": "contrail.analytics.ssd_data_dir"
},
"zk_ip_port": {
"newname": "contrail.config.zookeeper_ip_port",
"oldname": "contrail.config.zk_ip_port"
},
"hc_interval": {
"newname": "contrail.config.healthcheck_interval",
"oldname": "contrail.config.hc_interval"
},
"redis_password": {
"newname": "contrail.analytics.redis_password",
"oldname": "contrail.config.redis_password"
},
"encap_priority": {
"newname": "contrail.control.encapsulation_priority",
"oldname": "contrail.control.encap_priority"
},
"vmware_ip": {
"newname": "contrail.vmware.ip",
"oldname": "contrail.vmware.vmware_ip"
},
"vmware_username": {
"newname": "contrail.vmware.username",
"oldname": "contrail.vmware.vmware_username"
},
"vmware_password": {
"newname": "contrail.vmware.password",
"oldname": "contrail.vmware.vmware_password"
},
"vmware_vswitch": {
"newname": "contrail.vmware.vswitch",
"oldname": "contrail.vmware.vmware_vswitch"
},
"vgw_public_subnet": {
"newname": "contrail.vgw.public_subnet",
"oldname": "contrail.vgw.vgw_public_subnet"
},
"vgw_public_vn_name": {
"newname": "contrail.vgw.public_vn_name",
"oldname": "contrail.vgw.vgw_public_vn_name"
},
"vgw_interface": {
"newname": "contrail.vgw.interface",
"oldname": "contrail.vgw.vgw_interface"
},
"vgw_gateway_routes": {
"newname": "contrail.vgw.gateway_routes",
"oldname": "contrail.vgw.vgw_gateway_routes"
},
"sriov_enable": {
"newname": "contrail.compute.sriov.enable",
"oldname": "contrail.openstack.sriov.enable"
},
"contrail_amqp_ip_list": {
"newname": "contrail.config.contrail_amqp_ip_list",
"oldname": "openstack.contrail_amqp_ip_list"
},
"contrail_amqp_ip_port": {
"newname": "contrail.config.contrail_amqp_port",
"oldname": "openstack.contrail_amqp_port"
},
"keystone_admin_password": {
"newname": "openstack.keystone.admin_password",
"oldname": "openstack.keystone_admin_password"
},
"keystone_admin_tenant": {
"newname": "openstack.keystone.admin_tenant",
"oldname": "openstack.keystone_admin_tenant"
},
"keystone_admin_user": {
"newname": "openstack.keystone.admin_user",
"oldname": "openstack.keystone_admin_user"
},
"mysql_root_password": {
"newname": "openstack.mysql.root_password",
"oldname": "openstack.mysql_root_password"
},
"mysql_service_password": {
"newname": "openstack.mysql.service_password",
"oldname": "openstack.mysql_service_password"
}
}
139 changes: 113 additions & 26 deletions src/server_mgr_db_convert.py
Expand Up @@ -47,7 +47,7 @@

_DEF_SMGR_DB_LOCATION = '/etc/contrail_smgr/smgr_data.db'
_DEF_TRANS_DICT_LOCATION = '/opt/contrail/server_manager/client/parameter-translation-dict.json'

_DEF_UPGRADE_TRANS_DICT_LOCATION = '/opt/contrail/server_manager/client/upgrade-parameter-translation-dict.json'
class DatabaseConvert():
def __init__(self, args_str=None):
parser = argparse.ArgumentParser(
Expand All @@ -58,12 +58,18 @@ def __init__(self, args_str=None):
help=(
"The location of the Parameter Translation Dictionary,"
" default /opt/contrail/server_manager/client/parameter-translation-dict.json"))
parser.add_argument(
"-ut", "--upgrade_translation_dict_location",
help=(
"The location of the Upgrade Parameter Translation Dictionary,"
" default /opt/contrail/server_manager/client/upgrade-parameter-translation-dict.json"))
parser.add_argument(
"-d", "--db_location",
help=(
"The location of the Server Manager Database to convert,"
" default /etc/contrail_smgr/smgr_data.db"))
serverMgrCfg = { 'db_location': _DEF_SMGR_DB_LOCATION, 'translation_dict_location': _DEF_TRANS_DICT_LOCATION }
serverMgrCfg = { 'db_location': _DEF_SMGR_DB_LOCATION, 'translation_dict_location': _DEF_TRANS_DICT_LOCATION,
'upgrade_translation_dict_location': _DEF_UPGRADE_TRANS_DICT_LOCATION }
parser.set_defaults(**serverMgrCfg)
self._args = None
self._args = parser.parse_args(args_str)
Expand All @@ -74,50 +80,131 @@ def __init__(self, args_str=None):
print "Error: " + str(e)
exit()
self.translation_dict = {}
self.upgrade_translation_dict = {}
try:
with open(str(self._args.translation_dict_location)) as json_file:
self.translation_dict = json.load(json_file)
self.old_to_new_dict = self._trans_dict_convert(self.translation_dict)
except Exception as e:
print "Cannot find translation dictionary at: " + str(self._args.translation_dict_location)
print "Error: " + str(e)
exit()
self.old_to_new_dict = self._trans_dict_convert(self.translation_dict)
try:
with open(str(self._args.upgrade_translation_dict_location)) as json_file:
self.upgrade_translation_dict = json.load(json_file)
self.upgrade_dict = self._upgrade_trans_dict_convert(self.upgrade_translation_dict)
except Exception as e:
print "Cannot find translation dictionary at: " + str(self._args.translation_dict_location)
print "Error: " + str(e)
exit()

def _trans_dict_convert(self, trans_dict):
old_to_new_dict = {}
for key in trans_dict:
old_to_new_dict[str(trans_dict[key]["oldname"])] = str(trans_dict[key]["newname"])
return old_to_new_dict

def _upgrade_trans_dict_convert(self, trans_dict):
upgrade_dict = {}
for key in trans_dict:
upgrade_dict[str(trans_dict[key]["oldname"])] = str(trans_dict[key]["newname"])
return upgrade_dict

def _find_key_in_dict(self, dict_to_search, search_key):
if search_key in dict_to_search.keys():
return dict_to_search
else:
for key in dict_to_search.keys():
if isinstance(dict_to_search[key], dict):
dict_found = self._find_key_in_dict(dict_to_search[key], search_key)
if dict_found:
return dict_found
return None

def _set_key_in_dict(self, dict_to_set, key_to_set, value):
key_to_set_list = key_to_set.split('.')
last_level = key_to_set_list[-1]
key_set = 0
iter_list = iter(key_to_set_list)
key_to_set = next(iter_list)
while not key_set:
if key_to_set in dict_to_set.keys():
if key_to_set == str(last_level):
dict_to_set[key_to_set] = value
key_set = 1
else:
dict_to_set = dict_to_set[key_to_set]
key_to_set = next(iter_list)
else:
if key_to_set == str(last_level):
dict_to_set[key_to_set] = value
key_set = 1
else:
dict_to_set[key_to_set] = {}
dict_to_set = dict_to_set[key_to_set]
key_to_set = next(iter_list)


def old_to_new_convert(self, cluster_id, cluster_params):
    """Convert a cluster's legacy flat parameters to the nested format.

    Every flat key found in self.old_to_new_dict is re-homed under a
    nested 'provision' sub-dict at its dotted new-name path; the original
    flat key is set to None (not deleted). Clusters that already carry a
    'provision' key are written back unchanged. The modified cluster is
    persisted via self._serverDb.modify_cluster.

    Raises ValueError when either argument is falsy.
    """
    if not cluster_id or not cluster_params:
        raise ValueError('Invalid cluster_id and or cluster_params')
    discard_key_list = []
    new_cluster_params = {}
    if "provision" not in cluster_params:
        for old_key in cluster_params:
            if old_key in self.old_to_new_dict:
                new_key = self.old_to_new_dict[str(old_key)]
                split_dest_v_name = new_key.split('.')
                # Walk/create the nested path for all but the last segment.
                tmp_dict = new_cluster_params
                for level in split_dest_v_name[:-1]:
                    if level not in tmp_dict:
                        tmp_dict[str(level)] = {}
                    tmp_dict = tmp_dict[str(level)]
                tmp_dict[split_dest_v_name[-1]] = cluster_params[old_key]
                discard_key_list.append(old_key)
        cluster_params['provision'] = new_cluster_params
        # Old flat keys are nulled rather than removed.
        for key in discard_key_list:
            cluster_params[key] = None
    modified_cluster = {'id': cluster_id, 'parameters': cluster_params}
    try:
        self._serverDb.modify_cluster(modified_cluster)
    except Exception as e:
        print("Exception: " + str(e))

def new_params_upgrade(self, cluster_id, cluster_params):
    """Rename already-converted ('provision'-style) parameters in place.

    For each entry of self.upgrade_dict, searches the nested 'provision'
    sub-dict for the *leaf* of the old dotted name, nulls the old
    location, and writes the value at the new dotted path. The modified
    cluster is persisted via self._serverDb.modify_cluster.

    Raises ValueError when either argument is falsy.

    Cleanups vs. previous version: removed unused discard_key_list /
    new_cluster_params locals and a dead modified_cluster assignment.
    """
    if not cluster_id or not cluster_params:
        raise ValueError('Invalid cluster_id and or cluster_params')
    if "provision" in cluster_params:
        for old_key_name, new_key_name in self.upgrade_dict.items():
            # Only the last segment of the old dotted name is searched for;
            # the leading segments in the dictionary are informational.
            leaf = str(old_key_name.split('.')[-1])
            holder = self._find_key_in_dict(cluster_params["provision"], leaf)
            if holder:
                value = holder[leaf]
                holder[leaf] = None  # old location is nulled, not deleted
                self._set_key_in_dict(cluster_params["provision"],
                                      new_key_name, value)
    modified_cluster = {'id': cluster_id, 'parameters': cluster_params}
    try:
        self._serverDb.modify_cluster(modified_cluster)
    except Exception as e:
        print("Exception: " + str(e))

def convert(self):
    """Convert every cluster's parameters in the Server Manager DB.

    Clusters still in the legacy flat format (no 'provision' key) go
    through old_to_new_convert; already-converted clusters go through
    new_params_upgrade to pick up renamed parameters.

    NOTE(review): this view of the file is a rendered diff -- the old
    inline conversion logic (moved into old_to_new_convert by this
    change) has been removed here; only the dispatch remains.
    """
    clusters = self._serverDb.get_cluster({}, detail=True)
    for cluster in clusters:
        # HACK: eval() on stored parameter text -- assumes the DB is a
        # trusted, locally-managed source; ast.literal_eval would be safer.
        cluster_params = eval(cluster['parameters'])
        cluster_id = str(cluster['id'])
        print(str(cluster_id) + "\n")
        print(str(cluster_params) + "\n")
        if "provision" not in cluster_params:
            self.old_to_new_convert(cluster_id, cluster_params)
        else:
            self.new_params_upgrade(cluster_id, cluster_params)

def main(args_str=None):
db_convert = DatabaseConvert(args_str)
Expand Down

0 comments on commit 4cac615

Please sign in to comment.