Hi,
I am trying to integrate Alertmanager with Rocket.Chat (ver 0.65) following these instructions.
An error appears in the Rocket.Chat logs.
Please help me.
Thank you.
I20180529-07:54:52.029(-4) rocketchat_logger rocketchat_logger.js:278 Integrations ➔ Incoming WebHook.error script.js:1 var Script=function(){function Script(){}var _proto=Script.prototype;_proto.process_incoming_request=function process_incoming_request(_ref){var request=_ref.request;var alertColor="warning";if(request.content.status=="resolved"){alertColor="good"}else if(request.content.status=="firing"){alertColor="danger"}console.log(request.content);var finFields=[];for(i=0;i<request.content.alerts.length;i++){var endVal=request.content.alerts[i];var elem={title:"alertname: "+endVal.labels.alertname,value:"*instance:* "+endVal.labels.instance,"short":false};finFields.push(elem);finFields.push({title:"description",value:endVal.annotations.description});finFields.push({title:"summary",value:endVal.annotations.summary})}return{content:{username:"Prometheus Alert",attachments:[{color:alertColor,title_link:request.content.externalURL,title:"Prometheus notification",fields:finFields}]}};return{error:{success:false}}};return Script}();
That does not appear to be the full error. If you don't mind, could you please copy the surrounding logs so we can see what the issue is?
Thank you for answering.
This is the full error, together with an alert from Alertmanager:
I20180529-07:54:52.026(-4) rocketchat_logger rocketchat_logger.js:278 Integrations ➔ Incoming WebHook.error [Error running Script in Trigger prometheus :]
I20180529-07:54:52.028(-4) rocketchat_logger rocketchat_logger.js:278 Integrations ➔ Incoming WebHook.error var Script=function(){function Script(){}var _proto=Script.prototype;_proto.process_incoming_request=function process_incoming_request(_ref){var request=_ref.request;var alertColor="warning";if(request.content.status=="resolved"){alertColor="good"}else if(request.content.status=="firing"){alertColor="danger"}console.log(request.content);var finFields=[];for(i=0;i<request.content.alerts.length;i++){var endVal=request.content.alerts[i];var elem={title:"alertname: "+endVal.labels.alertname,value:"*instance:* "+endVal.labels.instance,"short":false};finFields.push(elem);finFields.push({title:"description",value:endVal.annotations.description});finFields.push({title:"summary",value:endVal.annotations.summary})}return{content:{username:"Prometheus Alert",attachments:[{color:alertColor,title_link:request.content.externalURL,title:"Prometheus notification",fields:finFields}]}};return{error:{success:false}}};return Script}();
I20180529-07:54:52.029(-4) rocketchat_logger rocketchat_logger.js:278 Integrations ➔ Incoming WebHook.error [Stack:]
I20180529-07:54:52.029(-4) rocketchat_logger rocketchat_logger.js:278 Integrations ➔ Incoming WebHook.error script.js:1 var Script=function(){function Script(){}var _proto=Script.prototype;_proto.process_incoming_request=function process_incoming_request(_ref){var request=_ref.request;var alertColor="warning";if(request.content.status=="resolved"){alertColor="good"}else if(request.content.status=="firing"){alertColor="danger"}console.log(request.content);var finFields=[];for(i=0;i<request.content.alerts.length;i++){var endVal=request.content.alerts[i];var elem={title:"alertname: "+endVal.labels.alertname,value:"*instance:* "+endVal.labels.instance,"short":false};finFields.push(elem);finFields.push({title:"description",value:endVal.annotations.description});finFields.push({title:"summary",value:endVal.annotations.summary})}return{content:{username:"Prometheus Alert",attachments:[{color:alertColor,title_link:request.content.externalURL,title:"Prometheus notification",fields:finFields}]}};return{error:{success:false}}};return Script}(); TypeError: Cannot read property 'length' of undefined at Script.process_incoming_request (script.js:1:390) at evalmachine.<anonymous>:1:8 at ContextifyScript.Script.runInContext (vm.js:59:29) at ContextifyScript.Script.runInNewContext (vm.js:65:15) at Object.runInNewContext (vm.js:135:38) at Object.executeIntegrationRest [as action] (meteor://💻app/packages/rocketchat_integrations.js:2401:25) at Route.share.Route.Route._callEndpoint (meteor://💻app/packages/nimble_restivus.js:347:32) at meteor://💻app/packages/nimble_restivus.js:236:33 at packages/simple_json-routes.js:98:9
{ receiver: 'rocketchat', status: 'firing', alerts: [ { status: 'firing', labels: [Object], annotations: {}, startsAt: '2018-05-29T14:08:57.860711149+03:00', endsAt: '0001-01-01T00:00:00Z', generatorURL: 'http://a.b.c.d:9090/graph?g0.expr=up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1' } ], groupLabels: { alertname: 'InstanceDown' }, commonLabels: { alertname: 'InstanceDown', instance: 'a.b.c.d:9100', job: 'node' }, commonAnnotations: {}, externalURL: 'http://a.b.c.d:9093', version: '4', groupKey: '{}:{alertname="InstanceDown"}' }
Can you get an example of the payload sent? It looks like the alerts property might be missing.
Thank you for your response.
{ receiver: 'rocketchat', status: 'firing', alerts: [ { status: 'firing', labels: [Object], annotations: {}, startsAt: '2018-05-29T14:08:57.860711149+03:00', endsAt: '0001-01-01T00:00:00Z', generatorURL: 'http://a.b.c.d:9090/graph?g0.expr=up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1' } ], groupLabels: { alertname: 'InstanceDown' }, commonLabels: { alertname: 'InstanceDown', instance: 'a.b.c.d:9100', job: 'node' }, commonAnnotations: {}, externalURL: 'http://a.b.c.d:9093', version: '4', groupKey: '{}:{alertname="InstanceDown"}' }
This is the alert sent from Alertmanager.
All settings were done following the examples given on GitHub (Prometheus and Alertmanager).
I saw an example on YouTube that works, but with Rocket.Chat version 0.63.3.
I was using 0.64.2 (I believe) and upgraded to 0.65.
It still doesn't work.
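Note: the stack trace above fails at request.content.alerts.length (script.js:1:390), which means at least one incoming request carried a body without an alerts array, even though the logged payload does contain one. A minimal hardened sketch of the same script, assuming the Alertmanager v4 payload shape shown above, could guard that access instead of crashing:

class Script {
  process_incoming_request({ request }) {
    var content = request.content || {};

    // Guard: this is the exact access that threw the TypeError above.
    if (!Array.isArray(content.alerts)) {
      return { error: { success: false } };
    }

    var alertColor = 'warning';
    if (content.status === 'resolved') {
      alertColor = 'good';
    } else if (content.status === 'firing') {
      alertColor = 'danger';
    }

    var finFields = [];
    for (var i = 0; i < content.alerts.length; i++) {
      var endVal = content.alerts[i];
      // annotations was {} in the logged payload, so these reads may be undefined.
      var annotations = endVal.annotations || {};
      finFields.push({
        title: 'alertname: ' + endVal.labels.alertname,
        value: '*instance:* ' + endVal.labels.instance,
        short: false
      });
      finFields.push({ title: 'description', value: annotations.description || '' });
      finFields.push({ title: 'summary', value: annotations.summary || '' });
    }

    return {
      content: {
        username: 'Prometheus Alert',
        attachments: [{
          color: alertColor,
          title_link: content.externalURL,
          title: 'Prometheus notification',
          fields: finFields
        }]
      }
    };
  }
}

With the guard in place, a request without alerts (for example, a manual test POST or a sender other than Alertmanager) yields an error response rather than a TypeError; the original script's final return { error: { success: false } } was unreachable dead code.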
I have the same error, and no answer either.
Rocket.Chat (ver 0.69)
Hi,
try this configuration for Alertmanager. First, the Prometheus alert rules:
# Alert for any instance that is unreachable for >5 minutes.
- alert: InstanceDown
  expr: up == 0
  for: 5m
  labels:
    severity: page
  annotations:
    summary: "Instance {{ $labels.instance }} down"
    description: "{{ $labels.instance }} of job {{ $labels.job }} has been down for more than 5 minutes."

# Alert for any instance that has a median request latency >1s.
- alert: APIHighRequestLatency
  expr: api_http_request_latencies_second{quantile="0.5"} > 1
  for: 10m
  annotations:
    summary: "High request latency on {{ $labels.instance }}"
    description: "{{ $labels.instance }} has a median request latency above 1s (current value: {{ $value }}s)"
And in alertmanager.yml:

route:
  repeat_interval: 30s
  group_interval: 30s
  receiver: 'rocketchat'

receivers:
  - name: 'rocketchat'
    webhook_configs:
      - send_resolved: false
        url: 'http://rocketchatserverip/hooks/<your generated webhook token>'
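To check the webhook end to end without waiting for a real alert to fire, you can replay a minimal Alertmanager-style payload against the hook URL. Here is a sketch using Node's built-in http module; the hostname and hook token are placeholders to substitute with your own values:

// Sketch: POST a minimal Alertmanager v4-style payload to the incoming
// webhook, mimicking what Alertmanager itself sends.
const http = require('http');

const payload = JSON.stringify({
  receiver: 'rocketchat',
  status: 'firing',
  alerts: [{
    status: 'firing',
    labels: { alertname: 'InstanceDown', instance: 'a.b.c.d:9100', job: 'node' },
    annotations: {
      summary: 'Instance a.b.c.d:9100 down',
      description: 'a.b.c.d:9100 of job node has been down for more than 5 minutes.'
    },
    startsAt: '2018-05-29T14:08:57Z',
    endsAt: '0001-01-01T00:00:00Z'
  }],
  externalURL: 'http://a.b.c.d:9093',
  version: '4'
});

const req = http.request({
  hostname: 'rocketchatserverip',      // placeholder: your Rocket.Chat host
  path: '/hooks/YOUR_GENERATED_TOKEN', // placeholder: the token from the integration page
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(payload)
  }
}, res => {
  console.log('Rocket.Chat answered with HTTP', res.statusCode);
});
req.on('error', err => console.error('Request failed:', err.message));
req.end(payload);

With the hardened script above, a 200 response and a message in the channel confirm the whole chain; removing the alerts key from this test payload should then exercise the guard (an error response) instead of reproducing the original crash.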