Skip to content

Commit

Permalink
On index/landing page, the server responsiveness is checked by retrieving the filters instead of the statistics.
Browse files Browse the repository at this point in the history

The filters API endpoint is fully cached while the statistics are only semi-cached.
This also removes the spark jobserver call and calculations on the average job duration.
  • Loading branch information
Grifs committed Dec 23, 2022
1 parent f10b337 commit f0f579f
Show file tree
Hide file tree
Showing 3 changed files with 67 additions and 103 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# CHANGELOG

## Version 5.4.2

### Minor changes

- On index/landing page, the server responsiveness is checked by retrieving the filters instead of the statistics.
The filters API endpoint is fully cached while the statistics are only semi-cached.
This also removes the spark jobserver call and calculations on the average job duration.

## Version 5.4.1

### Bug fixes
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "LuciusWeb",
"version": "5.4.1",
"version": "5.4.2-alpha1",
"description": "Web interface for ComPass aka Lucius",
"repository": {
"type": "git",
Expand Down
160 changes: 58 additions & 102 deletions src/js/components/Check.js
Original file line number Diff line number Diff line change
@@ -1,47 +1,44 @@
import { a, div, br, label, input, p, button, code, pre, i, span } from '@cycle/dom'
import xs from 'xstream'
import { clone, equal, equals, mergeAll, omit } from 'ramda';
import { equals } from 'ramda';
import dropRepeats from 'xstream/extra/dropRepeats'
import debounce from 'xstream/extra/debounce'

// Alert the user when last response time is 1.5 times higher than the minimum
// over the history of the jobserver.
const LATENCY_FACTOR = 1.5
import sampleCombine from 'xstream/extra/sampleCombine';

function Check(sources) {

const state$ = sources.onion.state$

const props$ = sources.props
.compose(dropRepeats((x, y) => equals(x, y)))
.remember()
.compose(dropRepeats((x, y) => equals(x, y)))
.remember()

// Combine with deployments to the up-to-date endpoint config
const modifiedState$ = xs.combine(state$, sources.deployments)
.map(([state, _]) => state)
.map(([state, _]) => state)

const request$ = xs.combine(modifiedState$, props$)
.map(([_, props]) => {
return {
url: props.url + '&classPath=com.dataintuitive.luciusapi.statistics',
method: 'POST',
send: {},
'category': 'statistics'
}
})
.remember()
.map(([_, props]) => {
return {
url: props.url + '&classPath=com.dataintuitive.luciusapi.filters',
method: 'POST',
send: {},
'category': 'server-status-check'
}
})
.remember()

const response$$ = sources.HTTP
.select('statistics')
.select('server-status-check')

const invalidResponse$ = response$$
.map(response$ =>
response$
.filter(response => false) // ignore regular event
.replaceError(error => xs.of(error)) // emit error
)
.flatten()
.remember()
.map(response$ =>
response$
.filter(response => false) // ignore regular event
.replaceError(error => xs.of(error)) // emit error
)
.flatten()
.remember()

/**
* Parse the successful results only.
Expand All @@ -51,110 +48,69 @@ function Check(sources) {
* wrong job times.
*/
const validResponse$ = response$$
.map(response$ =>
response$
.replaceError(error => xs.empty())
)
.flatten()
.compose(debounce(500))

// A valid response means we should see if the response times are reasonable.
// Trigger a request for every validResponse from statistics.
const requestJobs$ = xs.combine(validResponse$, props$)
.map(([trigger, props]) => ({
url: props.url + '/jobs',
method: 'GET',
send: {},
'category': 'jobs'
}))
.remember()

const responseJobs$$ = sources.HTTP
.select('jobs')

const validResponseJobs$ = responseJobs$$
.map(response$ =>
response$
.replaceError(error => xs.empty())
)
.flatten()
.remember()

const dataStatistics$ = validResponse$
.map(result => result.body.result.data)

const jobs$ = validResponseJobs$
.map(result => result.body)
.map(response$ =>
response$
.replaceError(error => xs.empty())
)
.flatten()
.compose(debounce(500))

/**
* An indicator of the data loading...
*/
const initVdom$ = xs.periodic(200)
.map(i => i % 4)
.map(i => [
span(".grey-text .testing", ".".repeat(i)),
span(".grey-text .text-lighten-4 .testing2", ".".repeat(3-i))
])
.endWhen(validResponseJobs$)
.map(i => i % 4)
.map(i => [
span(".grey-text .testing", ".".repeat(i)),
span(".grey-text .text-lighten-4 .testing2", ".".repeat(3-i))
])
.endWhen(response$$)

/**
* Calculate a measure for the performance of the API.
* We currently look at the difference between the last
* reponse time and the minumum one with a factor
*/
/**
 * Compute a latency ratio for the 'statistics' endpoint from a jobserver
 * job listing: (most recent duration) / (minimum duration * factor).
 * A result > 1 means the last call took more than `factor` times the
 * historical minimum, i.e. the cluster looks slow.
 *
 * @param table  array of jobserver job entries; each entry is expected to
 *               have `classPath`, `status` and a `duration` string such as
 *               "1.2 secs" — assumed shape, inferred from the filters below;
 *               TODO confirm against the jobserver /jobs response.
 * @param factor tolerance multiplier applied to the minimum duration.
 */
function differenceWithStatisticsResponses(table, factor) {
// Keep only finished 'statistics' jobs and reduce each to its duration,
// stripping the ' secs' suffix so the strings coerce to numbers below.
const statsTable = table
.filter(el => el.classPath === "com.dataintuitive.luciusapi.statistics")
.filter(el => el.status !== "RUNNING")
.map(el => el.duration)
.map(durationString => durationString.replace(' secs', ''))
// Assumes the jobserver lists jobs newest-first — TODO confirm.
const lastEntry = statsTable[0]
// NOTE(review): Array.prototype.min() is not standard JavaScript —
// presumably added by a prototype extension elsewhere in the project;
// verify, otherwise this throws at runtime.
const minEntry = statsTable.min()
// Implicit string-to-number coercion via division; ratio > 1 flags a slow server.
return lastEntry / (minEntry * factor)
}

// Apply the metric to the jobs listing
const responseMetric$ = jobs$
.map(jobs => differenceWithStatisticsResponses(jobs, LATENCY_FACTOR))
const requestTime$ = request$.mapTo(new Date().getTime())
const responseTime$ = validResponse$.mapTo(new Date().getTime())

const timeDifference$ = responseTime$.compose(sampleCombine(requestTime$))
.map(([request, response]) => (response - request) / 1000 )

const maxNormalTime$ = state$.map((state) => state.settings.config.normalStatisticsResponseTime)

// When the performance metric is higher than 1, we show the user a message.
const delay$ = xs.combine(responseMetric$, maxNormalTime$)
const delay$ = xs.combine(timeDifference$, maxNormalTime$)
.filter(([metric, max]) => metric > max)
.mapTo({ text: 'The cluster seems to be slower than expected.\n Please have patience or try again in 5"...', duration: 15000 })
.mapTo({ text: 'The cluster seems to be slower than expected.\n Please have patience or try again in 5\'...', duration: 15000 })

const loadedVdom$ = xs.combine(responseMetric$, maxNormalTime$)
.map(([metric, max]) =>
(metric < max) ?
i('.material-icons .green-text .medium .result-good', 'done') :
i('.material-icons .red-text .medium .result-busy', 'done')
)
const loadedVdom$ = xs.combine(timeDifference$, maxNormalTime$)
.map(([metric, max]) =>
(metric < max) ?
i('.material-icons .green-text .medium .result-good', 'done') :
i('.material-icons .red-text .medium .result-busy', 'done')
)

const errorVdom$ = invalidResponse$.mapTo(i('.material-icons .red-text .medium .result-down', 'trending_down'))

const vdom$ = xs.merge(
initVdom$,
// loadingVdom$,
loadedVdom$,
errorVdom$,
initVdom$,
// loadingVdom$,
loadedVdom$,
errorVdom$,
).remember()

const alert$ = invalidResponse$
.remember()
.remember()

// This is needed in order to get the onion stream active!
const defaultReducer$ = xs.of(prevState => {
if (typeof prevState === 'undefined') {
return {}
} else {
return prevState
}
if (typeof prevState === 'undefined') {
return {}
} else {
return prevState
}
});

return {
DOM: vdom$,
HTTP: xs.merge(request$, requestJobs$),
HTTP: xs.merge(request$),
onion: xs.merge(
defaultReducer$,
),
Expand Down

0 comments on commit f0f579f

Please sign in to comment.