
Commit

bug fixes
sasgit-hub committed Feb 24, 2025
1 parent d5c6d0c commit 1680f37
Showing 9 changed files with 33 additions and 23 deletions.
4 changes: 2 additions & 2 deletions psconfig/perfsonar-psconfig/psconfig/client/pscheduler/run.py
@@ -42,8 +42,8 @@ def participants(self, val=None):
 
     def participant_data(self, val=None):
         if val is not None:
-            self.data['participant_data'] = val
-        return self.data.get('participant_data', None)
+            self.data['participant-data'] = val
+        return self.data.get('participant-data', None)
 
     def participant_data_full(self, val=None):
         if val is not None:
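
The change above switches the stored dictionary key from participant_data to participant-data so the accessor matches the hyphenated field name pScheduler uses in its JSON run records. A minimal sketch of the getter/setter pattern involved; the class name and hostname below are made up for illustration, not the real psconfig client class:

    class RunSketch:
        """Illustrative stand-in for the psconfig pScheduler run client class."""
        def __init__(self, data=None):
            self.data = data if data is not None else {}

        def participant_data(self, val=None):
            # Store under the hyphenated key so it round-trips with pScheduler JSON.
            if val is not None:
                self.data['participant-data'] = val
            return self.data.get('participant-data', None)

    # hostname invented for the example
    run = RunSketch({'participant-data': [{'address': 'psched.example.net'}]})
    assert run.participant_data() == [{'address': 'psched.example.net'}]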
@@ -82,8 +82,8 @@ def tool(self, val=None):
 
     def lead_bind(self, val=None):
         if val is not None:
-            self.data['lead_bind'] = val
-        return self.data.get('lead_bind', None)
+            self.data['lead-bind'] = val
+        return self.data.get('lead-bind', None)
 
     def reference(self, val=None):
         if val is not None:
@@ -664,9 +664,9 @@ def get_lead(self):
 
         #set lead bind address if we have map set - only set it if we are local (first participant None) or explicitly call out the address
        if lead and self.lead_bind_map and self.lead_bind_map.get(lead):
-            self.lead_bind = self.lead_bind_map[lead]
+            self.lead_bind(self.lead_bind_map[lead])
        elif self.lead_bind_map and self.lead_bind_map.get('_default'):
-            self.lead_bind = self.lead_bind_map['_default']
+            self.lead_bind(self.lead_bind_map['_default'])
 
         return lead
 
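
The get_lead() fix replaces attribute assignment with a call to the lead_bind() accessor: assigning self.lead_bind = ... shadows the method with a plain string and never writes the lead-bind key into self.data. A trimmed-down sketch of the difference, with an illustrative class name and a documentation IP address:

    class TaskSketch:
        """Trimmed-down illustration of the accessor pattern; not the real Task class."""
        def __init__(self):
            self.data = {}

        def lead_bind(self, val=None):
            if val is not None:
                self.data['lead-bind'] = val
            return self.data.get('lead-bind', None)

    t = TaskSketch()
    t.lead_bind('198.51.100.10')      # correct: value lands in data['lead-bind']
    # t.lead_bind = '198.51.100.10'   # the old bug: overwrites the method, data never updated
    assert t.data['lead-bind'] == '198.51.100.10'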
@@ -556,7 +556,7 @@ def log_info(self, msg, local_context=None):
 
     def log_warn(self, msg, local_context=None):
         if self.logger:
-            self.logger.warn(self.logf.format(msg, local_context))
+            self.logger.warning(self.logf.format(msg, local_context))
         elif self.debug:
             print(msg)
             print('\n')
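
This and the remaining logger.warn() to logger.warning() changes in the files below address the fact that Logger.warn() is a long-deprecated alias in Python's standard logging module (deprecated since Python 3.3) and emits a DeprecationWarning on modern interpreters. The supported spelling:

    import logging

    logger = logging.getLogger("psconfig")
    logger.warning("preferred, supported API")
    # logger.warn("deprecated alias; emits DeprecationWarning on modern Python")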
@@ -38,9 +38,9 @@ def maintainer(self, val=None):
 
     def scheduling_class(self, val=None):
         if val is not None:
-            self.data['scheduling_class'] = val
+            self.data['scheduling-class'] = val
 
-        return self.data.get('scheduling_class', None)
+        return self.data.get('scheduling-class', None)
 
 
 
@@ -62,7 +62,7 @@ def _merge_configs(self, psconfig1, psconfig2):
                 for psconfig2_key in psconfig2.data.get(field).keys():
                     if psconfig1.data[field].get(psconfig2_key, None):
                         if self.logger:
-                            self.logger.warn("PSConfig merge: Skipping {} field's {} because it already exists".format(field, psconfig2_key))
+                            self.logger.warning("PSConfig merge: Skipping {} field's {} because it already exists".format(field, psconfig2_key))
                     else:
                         psconfig1.data[field][psconfig2_key] = psconfig2.data[field][psconfig2_key]
 
12 changes: 11 additions & 1 deletion psconfig/perfsonar-psconfig/psconfig/client/psconfig/schema.py
@@ -366,7 +366,17 @@ def psconfig_json_schema(self):
                 "ttl": { "$ref": "#/pSConfig/Duration" },
                 "_meta": { "$ref": "#/pSConfig/AnyJSON" },
                 "label": { "type": "string" },
-                "schema": { "$ref": "#/pSConfig/Cardinal" }
+                "runs": {
+                    "type": "string",
+                    "enum": [
+                        "all",
+                        "succeeded",
+                        "failed"
+                    ]
+                },
+                "schema": { "$ref": "#/pSConfig/Cardinal" },
+                "uri-host": { "$ref": "#/pSConfig/URLHostPort" }
+
             },
             "additionalProperties": False,
             "required": [ "archiver", "data"]
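
The schema addition above lets archive objects in a pSConfig template carry a runs selector (all, succeeded, or failed) and a uri-host value alongside the existing fields. A hedged example of an archive entry the updated schema would now accept; the archiver settings, URL, and hostname are invented for illustration:

    archive = {
        "archiver": "http",
        "data": {"_url": "https://archive.example.net/logstash"},  # illustrative URL
        "runs": "failed",                   # new enum: all | succeeded | failed
        "uri-host": "archive.example.net",  # new field, validated as URLHostPort
        "ttl": "PT1H"
    }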
20 changes: 10 additions & 10 deletions psconfig/perfsonar-psconfig/psconfig/grafana/agent.py
@@ -104,7 +104,7 @@ def _run_start(self, agent_conf):
             self.logger.debug(self.logf.format("No grafana-url specified. Defaulting to {}".format(default)))
             agent_conf.grafana_url(default)
         if not (agent_conf.grafana_token() or (agent_conf.grafana_user() and agent_conf.grafana_password())):
-            self.logger.warn(self.logf.format("No grafana-token or grafana-user/grafana-password specified. Unless your grafana instance does not require authentication, then your attempts to create dashboards may fail ".format(default)))
+            self.logger.warning(self.logf.format("No grafana-token or grafana-user/grafana-password specified. Unless your grafana instance does not require authentication, then your attempts to create dashboards may fail ".format(default)))
         self.grafana_url = agent_conf.grafana_url()
         self.grafana_token = agent_conf.grafana_token()
         self.grafana_user = agent_conf.grafana_user()
@@ -216,7 +216,7 @@ def _run_handle_psconfig(self, psconfig, agent_conf, remote=None):
             tg.start()
             tg.next()
             if tg.expanded_test is None:
-                self.logger.warn(self.logf.format("Task {} does not have a valid test definition. Skipping.".format(task_name)))
+                self.logger.warning(self.logf.format("Task {} does not have a valid test definition. Skipping.".format(task_name)))
                 continue
             expanded_test = Test(data=tg.expanded_test)
             #get archives with template vars filled-in
@@ -249,7 +249,7 @@ def _run_handle_psconfig(self, psconfig, agent_conf, remote=None):
 
             # make sure we have at least one matching display
             if not matching_display_config:
-                self.logger.warn(self.logf.format("No display config for task {}. Skipping.".format(task_name)))
+                self.logger.warning(self.logf.format("No display config for task {}. Skipping.".format(task_name)))
                 continue
 
             ##
@@ -272,11 +272,11 @@ def _run_handle_psconfig(self, psconfig, agent_conf, remote=None):
             # Build the rows and columns from the pSConfig data
             group = psconfig.group(task.group_ref())
             if not group:
-                self.logger.warn(self.logf.format("Invalid group name {}. Check for typos in your pSConfig template file. Skipping task {}.".format(task.group_ref(), task_name)))
+                self.logger.warning(self.logf.format("Invalid group name {}. Check for typos in your pSConfig template file. Skipping task {}.".format(task.group_ref(), task_name)))
                 continue
             #TODO: Support single dimension?
             if group.dimension_count() != 2:
-                self.logger.warn(self.logf.format("Only support groups with 2 dimensions. Skipping task {}.".format(task_name)))
+                self.logger.warning(self.logf.format("Only support groups with 2 dimensions. Skipping task {}.".format(task_name)))
                 continue
 
             ##
@@ -323,14 +323,14 @@ def _run_handle_psconfig(self, psconfig, agent_conf, remote=None):
                     mdc_var_obj["grafana_datasource_name"] = mdc.datasource_name()
                     mdc_var_obj["grafana_datasource"] = self.grafana_datasource_by_name.get(mdc.datasource_name(), None)
                     if not mdc_var_obj["grafana_datasource"]:
-                        self.logger.warn(self.logf.format("'datasource_selector' is not auto and grafana_datasource_name '{}' does not exist on Grafana server. Skipping.".format(mdc.datasource_name())))
+                        self.logger.warning(self.logf.format("'datasource_selector' is not auto and grafana_datasource_name '{}' does not exist on Grafana server. Skipping.".format(mdc.datasource_name())))
                         continue
                 elif mdc.datasource_selector() == 'manual' and self.grafana_datasource:
                     #use the manally defined datasource in the agent config
                     mdc_var_obj["grafana_datasource"] = self.grafana_datasource
                     mdc_var_obj["grafana_datasource_name"] = self.grafana_datasource_name
                 else:
-                    self.logger.warn(self.logf.format("'datasource_selector' is not auto and no grafana_datasource_name is defined for display {}. Skipping.".format(mdc_name)))
+                    self.logger.warning(self.logf.format("'datasource_selector' is not auto and no grafana_datasource_name is defined for display {}. Skipping.".format(mdc_name)))
                     continue
 
                 #make sure datasource was actually set
@@ -689,7 +689,7 @@ def _gf_find_folder(self, name):
         '''
         r, msg = self._gf_http("/api/folders", "list_folders")
         if msg:
-            self.logger.warn(self.logf.format("Unable to list grafana folders: {}".format(msg)))
+            self.logger.warning(self.logf.format("Unable to list grafana folders: {}".format(msg)))
         else:
             for folder in r.json():
                 if name == folder.get("title", None):
@@ -703,7 +703,7 @@ def _gf_create_folder(self, name):
         '''
         r, msg = self._gf_http("/api/folders", "create_folder", method="post", data={"title": name})
         if msg:
-            self.logger.warn(self.logf.format("Unable to create grafana folder: {}".format(msg)))
+            self.logger.warning(self.logf.format("Unable to create grafana folder: {}".format(msg)))
             return
 
         return r.json().get("uid", None)
@@ -714,7 +714,7 @@ def _gf_set_home_dashboard(self, uid):
         '''
         r, msg = self._gf_http("/api/org/preferences", "set_home_dashboard", method="patch", data={"homeDashboardUID": uid})
         if msg:
-            self.logger.warn(self.logf.format("Unable to set home dashboard: {}".format(msg)))
+            self.logger.warning(self.logf.format("Unable to set home dashboard: {}".format(msg)))
             return
 
         return r.json()
2 changes: 1 addition & 1 deletion psconfig/perfsonar-psconfig/psconfig/hostmetrics/agent.py
@@ -147,7 +147,7 @@ def _run_end(self, agent_conf):
             if restart_result.returncode == 0:
                 self.logger.debug(self.logf.format("Service {} restarted".format(agent_conf.restart_service())))
             else:
-                self.logger.warn(self.logf.format("Service {} restart attempt failed".format(agent_conf.restart_service())))
+                self.logger.warning(self.logf.format("Service {} restart attempt failed".format(agent_conf.restart_service())))
         else:
             self.logger.debug(self.logf.format("No changes so file not updated.", {
                 "old_file_checksum": old_file_checksum,
2 changes: 1 addition & 1 deletion psconfig/perfsonar-psconfig/psconfig/pscheduler/agent.py
@@ -212,7 +212,7 @@ def _run_end(self, agent_conf):
         ##
         #Log results
         for error in task_manager.errors:
-            self.logger.warn(self.logf.format(error))
+            self.logger.warning(self.logf.format(error))
 
         for deleted_task in task_manager.deleted_tasks:
             self.logger.debug(self.logf.format("Deleted task " + str(deleted_task.uuid) + " on server " + str(deleted_task.url)))
