Skip to content

Commit

Permalink
Merge pull request #2123 from threefoldtech/development_statsSummary
Browse files Browse the repository at this point in the history
add stats summary endpoint
  • Loading branch information
AhmedHanafy725 authored Feb 12, 2024
2 parents b994468 + 93a1e2d commit 4259b69
Show file tree
Hide file tree
Showing 5 changed files with 272 additions and 1 deletion.
3 changes: 2 additions & 1 deletion packages/new_stats/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,6 @@ COPY . /app/
RUN yarn install
RUN yarn lerna run build --no-private && yarn workspace @threefold/newstats build

FROM nginx:1.16.0-alpine
FROM nginx:1.25.3-alpine
COPY --from=build /app/packages/new_stats/dist /usr/share/nginx/html
COPY --from=build /app/packages/new_stats/nginx /etc/nginx
47 changes: 47 additions & 0 deletions packages/new_stats/nginx/conf.d/default.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
server {
# Listen on the standard HTTP port for both IPv4 and IPv6.
listen 80;
listen [::]:80;
server_name localhost;

#access_log /var/log/nginx/host.access.log main;

# Static SPA build output (copied into the image by the Dockerfile).
location / {
root /usr/share/nginx/html;
index index.html index.htm;
}
# Stats summary API: handled entirely in nginx by the njs function
# getStats exported from njs/stats.js (imported as `main` in nginx.conf).
location /api/stats-summary {
default_type application/json;
js_content main.getStats;
}
#error_page 404 /404.html;

# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}

# proxy the PHP scripts to Apache listening on 127.0.0.1:80
#
#location ~ \.php$ {
# proxy_pass http://127.0.0.1;
#}

# pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
#
#location ~ \.php$ {
# root html;
# fastcgi_pass 127.0.0.1:9000;
# fastcgi_index index.php;
# fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
# include fastcgi_params;
#}

# deny access to .htaccess files, if Apache's document root
# concurs with nginx's one
#
#location ~ /\.ht {
# deny all;
#}
}
37 changes: 37 additions & 0 deletions packages/new_stats/nginx/nginx.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@

user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log notice;
pid /var/run/nginx.pid;
# Enable the njs scripting engine; load_module must appear at the top level
# of the main config, before the http {} block.
load_module modules/ngx_http_js_module.so;


events {
worker_connections 1024;
}


http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
# Directory searched by js_import for script files.
js_path "/etc/nginx/njs/";

# DNS resolver is required so ngx.fetch can resolve the external gridproxy
# hostnames. NOTE(review): hard-coded public Google DNS — confirm this is
# acceptable for the deployment environment.
resolver 8.8.8.8;
# Exposes stats.js's default export as `main` (used by js_content in conf.d).
js_import main from stats.js;

log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';

access_log /var/log/nginx/access.log main;

sendfile on;
#tcp_nopush on;

keepalive_timeout 65;

#gzip on;

include /etc/nginx/conf.d/*.conf;
}
45 changes: 45 additions & 0 deletions packages/new_stats/nginx/njs/cache.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
const fs = require("fs");
/**
 * Persist a stats summary to disk together with a write timestamp,
 * so later reads can judge the cache's freshness.
 * @param {object} summary - merged stats summary to cache
 * @param {string} path - file path of the cache file
 */
function updateCache(summary, path) {
  const payload = JSON.stringify({
    summary,
    updatedAt: Date.now(),
  });
  fs.writeFileSync(path, payload);
}

/**
 * Report whether a millisecond epoch timestamp lies within the last 24 hours.
 * @param {number} timestamp - ms since epoch
 * @returns {boolean} true when less than 24h has elapsed since `timestamp`
 */
function isLessThan24Hours(timestamp) {
  const DAY_MS = 24 * 60 * 60 * 1000;
  const elapsed = Date.now() - timestamp;
  return elapsed < DAY_MS;
}

// Fallback summary served when no usable cache exists on disk.
const DUMMY_DATA = {
  capacity: "32.74 PB",
  nodes: 2569,
  countries: 61,
  cores: 63968,
};

/**
 * Read the cached stats summary from `path`.
 * @param {string} path - cache file location
 * @returns {{summary: object, valid: boolean, error?: Error}}
 *   `summary` is the cached summary (or DUMMY_DATA on failure),
 *   `valid` is true only when the cache is present and less than 24h old,
 *   `error` is set when the cache could not be read/parsed.
 */
function readCache(path) {
  try {
    const cache = JSON.parse(fs.readFileSync(path));
    // Fix: throw a real Error (with a stack) instead of a bare string.
    if (!cache.summary) throw new Error("Invalid cache");
    return {
      summary: cache.summary,
      valid: isLessThan24Hours(cache.updatedAt),
    };
  } catch (error) {
    // Missing, corrupt, or schema-less cache: fall back to dummy data,
    // marked invalid, and surface the error so the caller can log it.
    return {
      summary: DUMMY_DATA,
      valid: false,
      error,
    };
  }
}

export default {
updateCache,
readCache,
};
141 changes: 141 additions & 0 deletions packages/new_stats/nginx/njs/stats.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
import cache from "./cache.js";

// Number of retry rounds attempted for endpoints that fail to respond.
const RETRIES = 3;
// On-disk location of the cached merged summary (written via cache.js).
const cache_path = "/tmp/statsSummary.json";
// Gridproxy stats endpoints for each network (mainnet, testnet, devnet),
// querying both "up" and "standby" nodes; results are merged into one summary.
let URLS = [
"https://gridproxy.grid.tf/stats?status=up",
"https://gridproxy.grid.tf/stats?status=standby",
"https://gridproxy.test.grid.tf/stats?status=up",
"https://gridproxy.test.grid.tf/stats?status=standby",
"https://gridproxy.dev.grid.tf/stats?status=up",
"https://gridproxy.dev.grid.tf/stats?status=standby",
];

/**
 * njs request handler for /api/stats-summary.
 * Serves the cached summary while it is fresh (<24h); otherwise fetches new
 * stats, and on failure falls back to the stale cache (or dummy data when the
 * cache itself was unreadable), flagged with `outdated: true`.
 * Always responds 200 with a JSON body.
 * @param {object} r - nginx request object
 */
async function getStats(r) {
  const cachedData = cache.readCache(cache_path);
  if (cachedData.valid) {
    r.return(200, JSON.stringify(cachedData.summary));
    return;
  }
  r.log(`Outdated cache, trying to update it...`);

  try {
    const stats = await fetchStats(r);
    r.return(200, JSON.stringify(stats));
    return;
  } catch (error) {
    r.error(`Failed to fetch stats: ${error}`);
    // Mark the fallback payload so clients can tell it is stale.
    cachedData.summary.outdated = true;
    // Fix: "retuning" -> "returning" in the log message.
    if (cachedData.error) r.error(`Failed to read cached data due to ${cachedData.error} \nreturning Dummy data`);
    r.return(200, JSON.stringify(cachedData.summary));
    return;
  }
}

/**
 * Start one ngx.fetch per URL and return the array of pending promises,
 * each resolving to the parsed JSON body.
 * @param {string[]} urls - endpoints to query
 * @param {object} r - nginx request object (used to read the Host header)
 * @returns {Promise<object>[]}
 */
function initTargeRequests(urls, r) {
  // TLS verification is disabled only when the request arrived on localhost
  // (e.g. self-signed certs during local development).
  const verify = r.headersIn["Host"].split(":")[0] !== "localhost";
  const parseJson = res => res.json();
  // eslint-disable-next-line no-undef
  return urls.map(url => ngx.fetch(url, { verify }).then(parseJson));
}
/**
 * Fetch stats from every gridproxy endpoint, retrying failed URLs up to
 * RETRIES rounds, merge the responses, and refresh the on-disk cache.
 * @param {object} r - nginx request object (for logging and Host header)
 * @returns {Promise<object>} merged summary ({capacity, nodes, countries, cores})
 * @throws {Error} when some endpoints still fail after all retry rounds
 */
async function fetchStats(r) {
  // Fix: operate on a local copy. The original reassigned the module-level
  // URLS, so after one fully successful round it became [] and every later
  // cache refresh fetched nothing.
  let pending = URLS.slice();
  let retries = 0;
  const stats = [];
  while (pending.length !== 0 && retries < RETRIES) {
    const responses = await Promise.allSettled(initTargeRequests(pending, r));
    const failedUrls = [];
    responses.forEach((item, index) => {
      if (item.status === "fulfilled") {
        stats.push(item.value);
      } else {
        failedUrls.push(pending[index]);
      }
    });
    pending = failedUrls;
    if (pending.length !== 0) retries++;
  }
  // Fix: use the RETRIES constant (was a hard-coded 3), throw a real Error
  // (was a bare string), and correct the "form" -> "from" typo.
  if (retries >= RETRIES && pending.length > 0) {
    throw new Error(`Failed to get response from ${pending} after ${retries} retries`);
  }
  const result = mergeStatsData(stats);
  try {
    cache.updateCache(result, cache_path);
  } catch (err) {
    // Cache write failure is non-fatal: log and still return fresh data.
    r.error(`Failed to update cache due to: ${err}`);
  }

  return result;
}

/**
 * Merge per-network stats responses into a single display summary.
 * @param {object[]} stats - raw gridproxy /stats responses
 * @returns {{capacity: string, nodes: number, countries: number, cores: number}}
 */
function mergeStatsData(stats) {
  // Start from a shallow copy of the first response, then fold in the rest.
  const merged = Object.assign({}, stats[0]);

  for (let i = 1; i < stats.length; i++) {
    const s = stats[i];
    if (!s) continue;
    merged.nodes += s.nodes;
    merged.totalCru += s.totalCru;
    merged.totalHru += s.totalHru;
    merged.totalSru += s.totalSru;
    merged.nodesDistribution = mergeNodeDistribution([merged.nodesDistribution, s.nodesDistribution]);
    // Country count is derived from the merged per-country distribution.
    merged.countries = Object.keys(merged.nodesDistribution).length;
  }

  return {
    capacity: toTeraOrGiga(merged.totalHru + merged.totalSru),
    nodes: merged.nodes,
    countries: merged.countries,
    cores: merged.totalCru,
  };
}

/**
 * Merge several per-country node-count maps into one, summing the counts.
 * Falsy entries in `stats` are skipped; missing countries count as zero.
 * @param {(object|null|undefined)[]} stats - country -> node count maps
 * @returns {object} merged country -> total node count map
 */
function mergeNodeDistribution(stats) {
  // Gather every country key that appears in any distribution, in order.
  const keys = [];
  stats.forEach(function (dist) {
    if (!dist) return;
    Object.keys(dist).forEach(function (key) {
      if (keys.indexOf(key) === -1) keys.push(key);
    });
  });

  // Sum the node counts per country across all distributions.
  const merged = {};
  keys.forEach(function (key) {
    let total = 0;
    stats.forEach(function (dist) {
      total += dist ? dist[key] || 0 : 0;
    });
    merged[key] = total;
  });

  return merged;
}

/**
 * Format a byte count for display: values below 1 GiB are returned as a bare
 * number string; larger values are scaled to GB / TB / PB with two decimals.
 * Falsy, zero, or non-numeric input yields "0".
 * @param {number|string} value - byte count
 * @returns {string}
 */
function toTeraOrGiga(value) {
  const GIGA = 1024 ** 3;

  if (!value) return "0";

  const bytes = Number(value);
  if (bytes === 0 || isNaN(bytes)) return "0";
  if (bytes < GIGA) return bytes.toString();

  const units = [" GB", " TB", " PB"];
  let scaled = bytes / GIGA;
  for (let i = 0; i < units.length - 1; i++) {
    if (scaled < 1024) return scaled.toFixed(2) + units[i];
    scaled = scaled / 1024;
  }
  // Anything past TB range is reported in PB, however large.
  return scaled.toFixed(2) + " PB";
}

// Exporting the main function for Nginx
export default { getStats };

0 comments on commit 4259b69

Please sign in to comment.