Skip to content

Commit 56893ac

Browse files
various (#4)
* add list of default metrics in readme * readme updates * various updates
1 parent bd81088 commit 56893ac

File tree

9 files changed

+104
-181
lines changed

9 files changed

+104
-181
lines changed

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,8 @@ All three exporters (metrics, log, and trace) can be configured in the same file
1515

1616
The old version of the metrics exporter can be found in the [old implementation branch][old implementation branch] and the new metrics exporter implementation is backward compatible such that the same configuration for both database connection and metrics definition can be used.
1717

18+
Users are encouraged to open issues and enhancement requests against this GitHub repo and to ask any questions. We will actively work on them alongside the continued development of the exporters.
19+
1820
### Build
1921

2022
Build without running tests using the following.

examples/README.md

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -4,22 +4,3 @@
44
Please refer to the Unified Observability in Grafana with converged Oracle Database Workshop at http://bit.ly/unifiedobservability and its corresponding repo https://github.com/oracle/microservices-datadriven/tree/main/grabdish/observability/db-metrics-exporter for complete examples.
55

66
More examples will be provided here in the near future.
7-
8-
# Metrics exporter
9-
10-
1. Pre-req. Run setup for the GrabDish workshop including observability lab steps to install and configure Grafana and Prometheus
11-
2. Run `./deploy.sh` in this directory
12-
3. `curl http://observability-exporter-example:8080/metrics` from within cluster to see Prometheus stats
13-
4. View same stats from within Grafana by loading AQ dashboard
14-
15-
The same can be done above for TEQ by simply replacing `aq` with `teq` in the deployment and configmap yamls
16-
17-
Troubleshooting...
18-
19-
kubectl port-forward prometheus-stable-kube-prometheus-sta-prometheus-0 -n msdataworkshop 9090:9090
20-
21-
# Logs exporter
22-
23-
# Trace exporter
24-
25-
# Combined Metrics, Logs, and Trace exporter

examples/deploy.sh

Lines changed: 0 additions & 62 deletions
This file was deleted.

examples/observability-exporter-example-service-monitor.yaml

Lines changed: 0 additions & 14 deletions
This file was deleted.

examples/observability-exporter-example-service.yaml

Lines changed: 0 additions & 14 deletions
This file was deleted.

src/main/java/oracle/observability/ObservabilityExporter.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ public class ObservabilityExporter {
2626
public String VAULT_SECRET_OCID = System.getenv("VAULT_SECRET_OCID"); //eg ocid....
2727
public String OCI_CONFIG_FILE = System.getenv("OCI_CONFIG_FILE"); //eg "~/.oci/config"
2828
public String OCI_PROFILE = System.getenv("OCI_PROFILE"); //eg "DEFAULT"
29+
public static final String CONTEXT = "context";
30+
public static final String REQUEST = "request";
2931

3032
PoolDataSource observabilityDB;
3133
public PoolDataSource getPoolDataSource() throws SQLException {

src/main/java/oracle/observability/logs/LogsExporter.java

Lines changed: 23 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,16 @@
1818
@RestController
1919
public class LogsExporter extends ObservabilityExporter implements Runnable {
2020

21-
private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(LogsExporter.class);
21+
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(LogsExporter.class);
22+
public static final String TIMESTAMPFIELD = "timestampfield";
23+
public static final String LOG = "log";
2224
public String LOG_INTERVAL = System.getenv("LOG_INTERVAL"); // "30s"
2325
private int logInterval = 30;
2426
List<String> lastLogged = new ArrayList<>();
2527
private java.sql.Timestamp alertLogQueryLastLocalDateTime;
2628

29+
private int consecutiveExceptionCount = 0; //used to backoff todo should be a finer/log entry level rather than global
30+
2731

2832
@PostConstruct
2933
public void init() throws Exception {
@@ -34,26 +38,33 @@ public void init() throws Exception {
3438
public void run() {
3539
while (true) {
3640
try {
37-
LOG.debug("LogExporter default metrics from:" + DEFAULT_METRICS);
41+
Thread.sleep(consecutiveExceptionCount * 1000);
42+
Thread.sleep(logInterval * 1000);
43+
LOGGER.debug("LogsExporter default metrics from:" + DEFAULT_METRICS);
3844
if(LOG_INTERVAL!=null && !LOG_INTERVAL.trim().equals("")) logInterval = Integer.getInteger(LOG_INTERVAL);
39-
LOG.debug("LogExporter logInterval:" + logInterval);
45+
LOGGER.debug("LogsExporter logInterval:" + logInterval);
4046
File tomlfile = new File(DEFAULT_METRICS);
4147
TomlMapper mapper = new TomlMapper();
4248
JsonNode jsonNode = mapper.readerFor(LogsExporterConfigEntry.class).readTree(new FileInputStream(tomlfile));
43-
JsonNode log = jsonNode.get("log");
49+
JsonNode log = jsonNode.get(LOG);
4450
if(log == null || log.isEmpty()) {
45-
LOG.info("No logs records configured");
51+
LOGGER.info("No logs records configured");
4652
return;
4753
}
4854
Iterator<JsonNode> logs = log.iterator();
4955
List<String> currentLogged = new ArrayList<>();
5056
try (Connection connection = getPoolDataSource().getConnection()) {
5157
while (logs.hasNext()) { //for each "log" entry in toml/config...
5258
JsonNode next = logs.next();
53-
String request = next.get("request").asText(); // the sql query
54-
LOG.debug("DBLogsExporter. request:" + request);
55-
String timestampfield = next.get("timestampfield").asText(); // eg ORIGINATING_TIMESTAMP
56-
LOG.debug("DBLogsExporter. timestampfield:" + timestampfield);
59+
String request = next.get(REQUEST).asText(); // the sql query
60+
LOGGER.debug("LogsExporter request:" + request);
61+
JsonNode timestampfieldNode = next.get(TIMESTAMPFIELD);
62+
if (timestampfieldNode==null) {
63+
LOGGER.warn("LogsExporter entry does not contain `timestampfield' value request:" + request);
64+
continue;
65+
}
66+
String timestampfield = timestampfieldNode.asText(); // eg ORIGINATING_TIMESTAMP
67+
LOGGER.debug("LogsExporter timestampfield:" + timestampfield);
5768
PreparedStatement statement = connection.prepareStatement(
5869
alertLogQueryLastLocalDateTime == null ? request : request + " WHERE " + timestampfield + " > ?");
5970
if(alertLogQueryLastLocalDateTime!=null) statement.setTimestamp(1, alertLogQueryLastLocalDateTime);
@@ -82,10 +93,11 @@ public void run() {
8293
}
8394
}
8495
lastLogged = currentLogged;
96+
consecutiveExceptionCount = 0;
8597
}
86-
Thread.sleep(logInterval * 1000);
8798
} catch (Exception e) {
88-
throw new RuntimeException(e);
99+
consecutiveExceptionCount++;
100+
LOGGER.warn("LogsExporter.processMetric exception:" + e);
89101
}
90102
}
91103
}

src/main/java/oracle/observability/metrics/MetricsExporter.java

Lines changed: 30 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -24,13 +24,19 @@
2424
@RestController
2525
public class MetricsExporter extends ObservabilityExporter {
2626

27+
private static final Logger LOGGER = LoggerFactory.getLogger(MetricsExporter.class);
28+
public static final String UP = "up";
29+
public static final String METRICSTYPE = "metricstype";
30+
public static final String METRICSDESC = "metricsdesc";
31+
public static final String LABELS = "labels";
32+
public static final String IGNOREZERORESULT = "ignorezeroresult";
33+
public static final String FALSE = "false";
2734
public String LISTEN_ADDRESS = System.getenv("LISTEN_ADDRESS"); // ":9161"
2835
public String TELEMETRY_PATH = System.getenv("TELEMETRY_PATH"); // "/metrics"
2936
//Interval between each scrape. Default is to scrape on collect requests. scrape.interval
3037
public String SCRAPE_INTERVAL = System.getenv("scrape.interval"); // "0s"
3138
public static final String ORACLEDB_METRIC_PREFIX = "oracledb_";
3239
Map<String, Gauge> gaugeMap = new HashMap<>();
33-
private static final Logger LOG = LoggerFactory.getLogger(MetricsExporter.class);
3440

3541
/**
3642
* The endpoint that prometheus will scrape
@@ -54,7 +60,7 @@ private void processMetrics() throws IOException, SQLException {
5460
JsonNode jsonNode = mapper.readerFor(MetricsExporterConfigEntry.class).readTree(new FileInputStream(tomlfile));
5561
JsonNode metric = jsonNode.get("metric");
5662
if(metric == null || metric.isEmpty()) {
57-
LOG.info("No logs records configured");
63+
LOGGER.info("No logs records configured");
5864
return;
5965
}
6066
Iterator<JsonNode> metrics = metric.iterator();
@@ -65,11 +71,11 @@ private void processMetrics() throws IOException, SQLException {
6571
processMetric(connection, metrics);
6672
}
6773
} finally {
68-
Gauge gauge = gaugeMap.get(ORACLEDB_METRIC_PREFIX + "up");
74+
Gauge gauge = gaugeMap.get(ORACLEDB_METRIC_PREFIX + UP);
6975
if (gauge == null) {
70-
Gauge upgauge = Gauge.build().name(ORACLEDB_METRIC_PREFIX + "up").help("Whether the Oracle database server is up.").register();
76+
Gauge upgauge = Gauge.build().name(ORACLEDB_METRIC_PREFIX + UP).help("Whether the Oracle database server is up.").register();
7177
upgauge.set(isConnectionSuccessful);
72-
gaugeMap.put(ORACLEDB_METRIC_PREFIX + "up", upgauge);
78+
gaugeMap.put(ORACLEDB_METRIC_PREFIX + UP, upgauge);
7379
} else gauge.set(isConnectionSuccessful);
7480
}
7581
}
@@ -87,38 +93,37 @@ private void processMetrics() throws IOException, SQLException {
8793
*/
8894
private void processMetric(Connection connection, Iterator<JsonNode> metric) {
8995
JsonNode next = metric.next();
90-
//todo ignore case
91-
String context = next.get("context").asText(); // eg context = "teq"
92-
String metricsType = next.get("metricstype") == null ? "" :next.get("metricstype").asText();
93-
JsonNode metricsdescNode = next.get("metricsdesc");
96+
String context = next.get(CONTEXT).asText(); // eg context = "teq"
97+
String metricsType = next.get(METRICSTYPE) == null ? "" :next.get(METRICSTYPE).asText();
98+
JsonNode metricsdescNode = next.get(METRICSDESC);
9499
// eg metricsdesc = { enqueued_msgs = "Total enqueued messages.", dequeued_msgs = "Total dequeued messages.", remained_msgs = "Total remained messages."}
95100
Iterator<Map.Entry<String, JsonNode>> metricsdescIterator = metricsdescNode.fields();
96101
Map<String, String> metricsDescMap = new HashMap<>();
97102
while(metricsdescIterator.hasNext()) {
98103
Map.Entry<String, JsonNode> metricsdesc = metricsdescIterator.next();
99104
metricsDescMap.put(metricsdesc.getKey(), metricsdesc.getValue().asText());
100105
}
101-
LOG.debug("context:" + context);
106+
LOGGER.debug("context:" + context);
102107
String[] labelNames = new String[0];
103-
if (next.get("labels") != null) {
104-
int size = next.get("labels").size();
105-
Iterator<JsonNode> labelIterator = next.get("labels").iterator();
108+
if (next.get(LABELS) != null) {
109+
int size = next.get(LABELS).size();
110+
Iterator<JsonNode> labelIterator = next.get(LABELS).iterator();
106111
labelNames = new String[size];
107112
for (int i = 0; i < size; i++) {
108113
labelNames[i] = labelIterator.next().asText();
109114
}
110-
LOG.debug("\n");
115+
LOGGER.debug("\n");
111116
}
112-
String request = next.get("request").asText(); // the sql query
113-
String ignorezeroresult = next.get("ignorezeroresult") == null ? "false" : next.get("ignorezeroresult").asText(); //todo, currently defaults to true
117+
String request = next.get(REQUEST).asText(); // the sql query
118+
String ignorezeroresult = next.get(IGNOREZERORESULT) == null ? FALSE : next.get(IGNOREZERORESULT).asText(); //todo, currently defaults to true
114119
ResultSet resultSet;
115120
try {
116121
resultSet = connection.prepareStatement(request).executeQuery();
117122
while (resultSet.next()) {
118123
translateQueryToPrometheusMetric(context, metricsDescMap, labelNames, resultSet);
119124
}
120125
} catch(SQLException e) { //this can be due to table not existing etc.
121-
LOG.warn("DBMetricsExporter.processMetric during:" + request + " exception:" + e);
126+
LOGGER.warn("MetricsExporter.processMetric during:" + request + " exception:" + e);
122127
return;
123128
}
124129
}
@@ -127,7 +132,7 @@ private void translateQueryToPrometheusMetric(String context, Map<String, String
127132
String[] labelNames,
128133
ResultSet resultSet) throws SQLException {
129134
String[] labelValues = new String[labelNames.length];
130-
Map<String, Integer> sqlQueryResults =
135+
Map<String, Long> sqlQueryResults =
131136
extractGaugesAndLabelValues(context, metricsDescMap, labelNames, resultSet, labelValues, resultSet.getMetaData().getColumnCount());
132137
setLabelValues(context, labelNames, labelValues, sqlQueryResults.entrySet().iterator());
133138
}
@@ -142,10 +147,10 @@ private void translateQueryToPrometheusMetric(String context, Map<String, String
142147
* @param columnCount
143148
* @throws SQLException
144149
*/
145-
private Map<String, Integer> extractGaugesAndLabelValues(
150+
private Map<String, Long> extractGaugesAndLabelValues(
146151
String context, Map<String, String> metricsDescMap, String[] labelNames, ResultSet resultSet,
147152
String[] labelValues, int columnCount) throws SQLException {
148-
Map<String, Integer> sqlQueryResults = new HashMap<>();
153+
Map<String, Long> sqlQueryResults = new HashMap<>();
149154
String columnName;
150155
String columnTypeName;
151156
for (int i = 0; i < columnCount; i++) { //for each column...
@@ -154,9 +159,9 @@ private Map<String, Integer> extractGaugesAndLabelValues(
154159
if (columnTypeName.equals("VARCHAR2")) //. typename is 2/NUMBER or 12/VARCHAR2
155160
;
156161
else
157-
sqlQueryResults.put(resultSet.getMetaData().getColumnName(i + 1), resultSet.getInt(i + 1));
162+
sqlQueryResults.put(resultSet.getMetaData().getColumnName(i + 1), resultSet.getLong(i + 1));
158163
String gaugeName = ORACLEDB_METRIC_PREFIX + context + "_" + columnName;
159-
LOG.debug("---gaugeName:" + gaugeName);
164+
LOGGER.debug("---gaugeName:" + gaugeName);
160165
Gauge gauge = gaugeMap.get(gaugeName);
161166
if (gauge == null) {
162167
if(metricsDescMap.containsKey(columnName)) {
@@ -173,9 +178,9 @@ private Map<String, Integer> extractGaugesAndLabelValues(
173178
return sqlQueryResults;
174179
}
175180

176-
private void setLabelValues(String context, String[] labelNames, String[] labelValues, Iterator<Map.Entry<String, Integer>> sqlQueryRestulsEntryIterator) {
181+
private void setLabelValues(String context, String[] labelNames, String[] labelValues, Iterator<Map.Entry<String, Long>> sqlQueryRestulsEntryIterator) {
177182
while(sqlQueryRestulsEntryIterator.hasNext()) { //for each column
178-
Map.Entry<String, Integer> sqlQueryResultsEntry = sqlQueryRestulsEntryIterator.next();
183+
Map.Entry<String, Long> sqlQueryResultsEntry = sqlQueryRestulsEntryIterator.next();
179184
boolean isLabel = false;
180185
for (int ii = 0; ii< labelNames.length; ii++) {
181186
if(labelNames[ii].equals(sqlQueryResultsEntry.getKey())) isLabel =true; // continue
@@ -186,7 +191,7 @@ private void setLabelValues(String context, String[] labelNames, String[] labelV
186191
try {
187192
gaugeMap.get(ORACLEDB_METRIC_PREFIX + context + "_" + sqlQueryResultsEntry.getKey().toLowerCase()).labels(labelValues).set(valueToSet);
188193
} catch (Exception ex) { //todo filter to avoid unnecessary exception handling
189-
LOG.debug("OracleDBMetricsExporter.translateQueryToPrometheusMetric Exc:" + ex);
194+
LOGGER.debug("OracleDBMetricsExporter.translateQueryToPrometheusMetric Exc:" + ex);
190195
}
191196
else gaugeMap.get(ORACLEDB_METRIC_PREFIX + context + "_" + sqlQueryResultsEntry.getKey().toLowerCase()).set(valueToSet);
192197
}

0 commit comments

Comments
 (0)