I have this Python telemetry scraper from Confluent:
import os
import time
import json
import urllib3
import boto3
import base64
from botocore.exceptions import ClientError
import requests
from datetime import datetime, timedelta

def send_telemetry_request(username, password):
    url = "https://api.telemetry.confluent.cloud/v2/metrics/cloud/query"
    headers = {
        'Content-Type': 'application/json',
    }
    auth = (username, password)
    body = {
        "aggregations": [{"metric": "io.confluent.kafka.server/request_count"}],
        "filter": {
            "op": "OR",
            # "$CLUSTER" is a placeholder for the Kafka cluster ID
            "filters": [{"field": "resource.kafka.id", "op": "EQ", "value": "$CLUSTER"}]
        },
        "granularity": "PT1M",
        # start_time_iso and end_time_iso are not defined anywhere in this script yet
        "intervals": [f"{start_time_iso}/{end_time_iso}"],
        "limit": 1000
    }
    try:
        response = requests.post(url, headers=headers, auth=auth, json=body)
        if response.status_code == 200:
            print("Request successful!")
            return response.json()  # Return the JSON content if the status code is 200
            # You can process the response content here if needed
        else:
            print(f"Request failed with status code: {response.status_code}")
            print(response.text)
    except Exception as e:
        print(f"An error occurred: {e}")
I need to update "intervals" in the body so it just gets the latest data every minute.
If I define "intervals" in the body with a hard-coded range:
"intervals": ["2024-01-29T12:29:00-06:00/2024-01-29T13:29:00-06:00"],
it works fine.
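
One way to get a rolling window is to compute the interval right before each request, with the current time as the end and one minute earlier as the start. Below is a minimal sketch under that assumption; the build_latest_interval helper and the one-minute lookback are my own names for illustration, not part of the original script or the Confluent API:

from datetime import datetime, timedelta, timezone

def build_latest_interval(lookback_minutes=1):
    # Build an ISO 8601 interval string covering the last `lookback_minutes`.
    # Timezone-aware UTC timestamps include the offset in isoformat(),
    # matching the "start/end" format shown in the working example above.
    end_time = datetime.now(timezone.utc).replace(microsecond=0)
    start_time = end_time - timedelta(minutes=lookback_minutes)
    return f"{start_time.isoformat()}/{end_time.isoformat()}"

The request body would then use:

    "intervals": [build_latest_interval()],

Called once a minute (for example from a loop with time.sleep(60) or a scheduled job), this produces intervals like 2024-01-29T18:29:00+00:00/2024-01-29T18:30:00+00:00. If the newest datapoints have not landed yet, you may need to shift the window back a few minutes, since metric data is not necessarily available immediately.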