@@ -214,51 +214,26 @@ func (e *Exporter) scheduledScrape(tick *time.Time) {
 
 func (e *Exporter) scrape(ch chan<- prometheus.Metric, tick *time.Time) {
 	e.totalScrapes.Inc()
-	var err error
-	var scrapemutex sync.Mutex
-	errChan := make(chan ScrapeResult, len(e.metricsToScrape.Metric))
+	errChan := make(chan error, len(e.metricsToScrape.Metric))
+	begun := time.Now()
 
-	defer func(begun time.Time) {
-		// other error
-		e.duration.Set(time.Since(begun).Seconds())
-		if err == nil {
-			e.error.Set(0)
-		} else {
-			e.error.Set(1)
-		}
-
-		// scrape error
-		close(errChan)
-		for scrape := range errChan {
-			if scrape.Err != nil {
-				if shouldLogScrapeError(scrape.Err, scrape.Metric.IgnoreZeroResult) {
-					level.Error(e.logger).Log("msg", "Error scraping metric",
-						"Context", scrape.Metric.Context,
-						"MetricsDesc", fmt.Sprint(scrape.Metric.MetricsDesc),
-						"time", time.Since(scrape.ScrapeStart),
-						"error", scrape.Err)
-				}
-				e.scrapeErrors.WithLabelValues(scrape.Metric.Context).Inc()
-			}
-		}
-
-	}(time.Now())
-
-	if err = e.db.Ping(); err != nil {
-		level.Debug(e.logger).Log("msg", "error = "+err.Error())
-		if strings.Contains(err.Error(), "sql: database is closed") {
+	if connectionError := e.db.Ping(); connectionError != nil {
+		level.Debug(e.logger).Log("msg", "error = "+connectionError.Error())
+		if strings.Contains(connectionError.Error(), "sql: database is closed") {
 			level.Info(e.logger).Log("msg", "Reconnecting to DB")
-			err = e.connect()
-			if err != nil {
-				level.Error(e.logger).Log("msg", "Error reconnecting to DB", err)
+			connectionError = e.connect()
+			if connectionError != nil {
+				level.Error(e.logger).Log("msg", "Error reconnecting to DB", connectionError)
 			}
 		}
 	}
 
-	if err = e.db.Ping(); err != nil {
+	if pingError := e.db.Ping(); pingError != nil {
 		level.Error(e.logger).Log("msg", "Error pinging oracle",
-			"error", err)
+			"error", pingError)
 		e.up.Set(0)
+		e.error.Set(1)
+		e.duration.Set(time.Since(begun).Seconds())
 		return
 	}
 
@@ -271,15 +246,10 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric, tick *time.Time) {
 		e.reloadMetrics()
 	}
 
-	wg := sync.WaitGroup{}
-
 	for _, metric := range e.metricsToScrape.Metric {
-		wg.Add(1)
 		metric := metric //https://golang.org/doc/faq#closures_and_goroutines
 
 		go func() {
-			defer wg.Done()
-
 			level.Debug(e.logger).Log("msg", "About to scrape metric",
 				"Context", metric.Context,
 				"MetricsDesc", fmt.Sprint(metric.MetricsDesc),
@@ -291,11 +261,13 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric, tick *time.Time) {
 				"Request", metric.Request)
 
 			if len(metric.Request) == 0 {
+				errChan <- errors.New("scrape request not found")
 				level.Error(e.logger).Log("msg", "Error scraping for "+fmt.Sprint(metric.MetricsDesc)+". Did you forget to define request in your toml file?")
 				return
 			}
 
 			if len(metric.MetricsDesc) == 0 {
+				errChan <- errors.New("metricsdesc not found")
 				level.Error(e.logger).Log("msg", "Error scraping for query"+fmt.Sprint(metric.Request)+". Did you forget to define metricsdesc in your toml file?")
 				return
 			}
@@ -304,19 +276,26 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric, tick *time.Time) {
 				if metricType == "histogram" {
 					_, ok := metric.MetricsBuckets[column]
 					if !ok {
+						errChan <- errors.New("metricsbuckets not found")
 						level.Error(e.logger).Log("msg", "Unable to find MetricsBuckets configuration key for metric. (metric="+column+")")
 						return
 					}
 				}
 			}
 
 			scrapeStart := time.Now()
-			if err1 := func() error {
-				scrapemutex.Lock()
-				defer scrapemutex.Unlock()
-				return e.ScrapeMetric(e.db, ch, metric, tick)
-			}(); err1 != nil {
-				errChan <- ScrapeResult{Err: err1, Metric: metric, ScrapeStart: scrapeStart}
+			scrapeError := e.ScrapeMetric(e.db, ch, metric, tick)
+			// Always send the scrapeError, nil or non-nil
+			errChan <- scrapeError
+			if scrapeError != nil {
+				if shouldLogScrapeError(scrapeError, metric.IgnoreZeroResult) {
+					level.Error(e.logger).Log("msg", "Error scraping metric",
+						"Context", metric.Context,
+						"MetricsDesc", fmt.Sprint(metric.MetricsDesc),
+						"time", time.Since(scrapeStart),
+						"error", scrapeError)
+				}
+				e.scrapeErrors.WithLabelValues(metric.Context).Inc()
 			} else {
 				level.Debug(e.logger).Log("msg", "Successfully scraped metric",
 					"Context", metric.Context,
@@ -325,7 +304,23 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric, tick *time.Time) {
 			}
 		}()
 	}
-	wg.Wait()
+
+	e.afterScrape(begun, len(e.metricsToScrape.Metric), errChan)
+}
+
+func (e *Exporter) afterScrape(begun time.Time, countMetrics int, errChan chan error) {
+	// Receive all scrape errors
+	totalErrors := 0.0
+	for i := 0; i < countMetrics; i++ {
+		scrapeError := <-errChan
+		if scrapeError != nil {
+			totalErrors++
+		}
+	}
+	close(errChan)
+
+	e.duration.Set(time.Since(begun).Seconds())
+	e.error.Set(totalErrors)
 }
 
 func (e *Exporter) connect() error {
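
The diff drops the sync.Mutex and sync.WaitGroup coordination in favor of a buffered error channel: every scrape goroutine sends exactly one value (nil or an error), and afterScrape receives exactly countMetrics values, which doubles as the join point before the duration and error gauges are set; e.error now carries the count of failed scrapes rather than a 0/1 flag. Below is a minimal, self-contained sketch of that channel pattern only; names such as fakeScrape are illustrative and not part of the exporter.

package main

import (
	"errors"
	"fmt"
	"time"
)

// fakeScrape stands in for a per-metric scrape; illustrative only.
func fakeScrape(i int) error {
	time.Sleep(10 * time.Millisecond)
	if i%3 == 0 {
		return errors.New("scrape failed")
	}
	return nil
}

func main() {
	const countMetrics = 7
	begun := time.Now()
	// Buffered to countMetrics so senders never block, even if the
	// receiver has not started draining yet.
	errChan := make(chan error, countMetrics)

	for i := 0; i < countMetrics; i++ {
		i := i
		go func() {
			errChan <- fakeScrape(i) // always send, nil or non-nil
		}()
	}

	// Receiving exactly countMetrics values is the join point; no
	// sync.WaitGroup is needed.
	totalErrors := 0.0
	for i := 0; i < countMetrics; i++ {
		if err := <-errChan; err != nil {
			totalErrors++
		}
	}
	close(errChan)

	fmt.Printf("scraped %d metrics in %s, %v errors\n",
		countMetrics, time.Since(begun), totalErrors)
}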