Merge branch 'main' into blockchain

Author: Adam
Date: 2025-07-30 16:14:13 +02:00
Committed via GitHub
30 changed files with 280 additions and 3383 deletions

.env.example (new file)

@@ -0,0 +1,17 @@
OUT_PORT=8443
JWT_SECRET=wut
PROM_PORT=9090
# This hash is generated with: htpasswd -nbB admin admin
PROM_HASH=$2y$05$29xV50WIgdOcLtFwKnr9cObGlS9booo97Ny7gRxAuADkwF9rS3nD.
PROM_PASS=admin
GRAPH_PORT=3000
ELK_PORT=5601
GOOGLE_CALLBACK_URL=https://localhost:8443/api/v1
GOOGLE_CLIENT_SECRET=susAF
GOOGLE_CLIENT_ID=Really
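The three GOOGLE_* values feed the remote-authentication module; package.json further down in this diff pulls in google-auth-library, so presumably the auth API builds its OAuth client from them. A minimal sketch under that assumption (the scope list is illustrative, not taken from the repo):

```
// Sketch only: assumes the auth API wires the GOOGLE_* variables above
// into google-auth-library. Scopes are illustrative.
import { OAuth2Client } from 'google-auth-library';

const oauthClient = new OAuth2Client(
  process.env.GOOGLE_CLIENT_ID,
  process.env.GOOGLE_CLIENT_SECRET,
  process.env.GOOGLE_CALLBACK_URL, // e.g. https://localhost:8443/api/v1
);

// URL the front end redirects users to for the Google consent screen.
const authUrl = oauthClient.generateAuthUrl({
  access_type: 'offline',
  scope: ['openid', 'email', 'profile'],
});
console.log(authUrl);
```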

@@ -16,7 +16,7 @@ set dotenv-load
# To launch all apis
@apis:
node src/dev.js
node src/start.js
# To launch the front end
@front:
@@ -53,3 +53,6 @@ set dotenv-load
@verify-contract:
forge verify-contract --chain-id 43113 --rpc-url=${AVAX_RPC_URL} --watch ${AVAX_CONTRACT_ADDR}
@status:
docker compose -f docker/docker-compose.yml ps
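The @verify-contract recipe targets chain id 43113 (Avalanche Fuji) through AVAX_RPC_URL and AVAX_CONTRACT_ADDR. On the Node side, the ethers dependency in package.json could read the same deployment; in this sketch the ABI fragment and getScore function are hypothetical stand-ins for whatever the Solidity sources actually expose:

```
// Sketch: reading the verified contract with ethers v6.
// AVAX_RPC_URL / AVAX_CONTRACT_ADDR are the same env vars the Justfile uses;
// getScore(...) is a hypothetical function name, not taken from the repo.
import { JsonRpcProvider, Contract } from 'ethers';

const provider = new JsonRpcProvider(process.env.AVAX_RPC_URL);
const abi = ['function getScore(uint256 tournamentId) view returns (uint256, uint256)'];
const scoreStore = new Contract(process.env.AVAX_CONTRACT_ADDR, abi, provider);

const [scoreA, scoreB] = await scoreStore.getScore(1n);
console.log({ scoreA, scoreB });
```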

@@ -1,2 +1,62 @@
# knl_meowscendence
Press F to pay respects.
## Folder structure
```
├── docker/ # Docker-related setup for services
│ ├── api-base/ # Backend API base image and compose file
│ ├── front/ # Frontend image, config, and cert automation
│ ├── monitoring/ # Monitoring stack: Prometheus, Grafana, exporters
│ ├── networks.yml # Docker network definitions
│ └── volumes.yml # Docker volume definitions
├── src/ # Application source code
│ ├── api/ # Backend logic (auth, user management)
│ ├── front/ # Frontend files
│ └── utils/ # Utility modules (auth, TOTP, etc.)
├── flake.nix & flake.lock # Nix flake configuration
└── Justfile # Task automation commands
```
## Modules done
5 major + 2 minor = 6 full modules (two minors count as one major)
- **Web**
- [x] Use a framework to build the backend. (Node with Fastify) - Major
- [ ] Use a framework or toolkit to build the front-end. (Tailwind CSS) - Minor
- [x] Use a database for the backend, and more. (SQLite) - Minor
- [ ] Store the score of a tournament in the Blockchain. (Solidity on Avalanche) - Major
- **User Management**
- [ ] Standard user management, authentication and users across tournaments. - Major
- [x] Implement remote authentication. - Major
- **Gameplay and user experience**
- [ ] Remote players - Major
- [ ] Multiplayer - Major
- [ ] Add another game - Major
- [ ] Game customization options - Minor
- [ ] Live chat - Major
- **AI-Algo**
- [ ] AI opponent - Major
- [ ] User and game stats dashboards - Minor
- **Cybersecurity**
- [ ] WAF/ModSecurity and Hashicorp Vault - Major
- [ ] RGPD compliance - Minor
- [x] 2FA and JWT - Major
- **DevOps**
- [x] Infrastructure setup for log management - Major
- [x] Monitoring system - Minor
- [x] Designing the backend as a microservice architecture - Major
- **Graphics**
- [ ] Use of advanced 3D techniques - Major
- **Accessibility**
- [ ] Support on all devices - Minor
- [ ] Expanding browser compatibility - Minor
- [ ] Multiple language support - Minor
- [ ] Add accessibility for visually impaired users - Minor
- [ ] Server-Side Rendering (SSR) integration - Minor
- **Server-Side Pong**
- [ ] Replace basic Pong with server-side Pong and implement an API - Major
- [ ] Enable Pong gameplay via CLI against web users with API integration
## License
This project is under the [MIT License](LICENSE).

docker/ELK/compose.yml (new file)

@@ -0,0 +1,4 @@
include:
- ./logstash/compose.yml
- ./kibana/compose.yml
- ./elasticsearch/compose.yml

@@ -0,0 +1,4 @@
FROM docker.elastic.co/elasticsearch/elasticsearch-wolfi:9.0.4
COPY --chown=elasticsearch:elasticsearch elasticsearch.yml /usr/share/elasticsearch/config/
COPY --chown=elasticsearch:elasticsearch jvm.options /usr/share/elasticsearch/config/jvm.options.d/custom.options

@@ -0,0 +1,10 @@
services:
elasticsearch:
container_name: transcendence-elasticsearch
build:
dockerfile: Dockerfile
context: .
environment:
- LOG_LEVEL=info
networks:
- elk

@@ -0,0 +1,9 @@
cluster.name: docker-cluster
node.name: transcendence-elasticsearch
discovery.type: single-node
xpack.security.enabled: false
network.host: 0.0.0.0
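With xpack.security.enabled: false the HTTP API answers without credentials, so any container on the transcendence-elk network can probe the single-node cluster directly. A minimal health check using Node 18+'s global fetch:

```
// Sketch: confirm the single-node cluster is reachable from the elk network.
// No auth is needed because xpack security is disabled above.
const res = await fetch('http://transcendence-elasticsearch:9200/_cluster/health');
console.log(await res.json()); // a single-node cluster typically reports "green" or "yellow"
```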

@@ -0,0 +1,2 @@
-Xms1g
-Xmx1g

@@ -0,0 +1,3 @@
FROM docker.elastic.co/kibana/kibana-wolfi:9.0.4
COPY --chmod=777 kibana.yml /etc/kibana/kibana.yml

@@ -0,0 +1,12 @@
services:
kibana:
container_name: transcendence-kibana
build:
dockerfile: Dockerfile
context: .
ports:
- ${ELK_PORT}:5601
environment:
- LOG_LEVEL=info
networks:
- elk

@@ -0,0 +1,6 @@
server.name: kibana
server.host: "0.0.0.0"
elasticsearch.hosts: ["http://transcendence-elasticsearch:9200"]
telemetry.enabled: false

@@ -0,0 +1,6 @@
FROM docker.elastic.co/logstash/logstash-wolfi:9.0.4
RUN rm -f /usr/share/logstash/pipeline/logstash.conf
COPY --chmod=777 pipeline/ /usr/share/logstash/pipeline/
COPY --chmod=777 config/ /usr/share/logstash/config/

@@ -0,0 +1,15 @@
services:
logstash:
container_name: transcendence-logstash
build:
dockerfile: Dockerfile
context: .
volumes:
- log-user:/var/log/user-api
- log-auth:/var/log/auth-api
- log-nginx:/var/log/nginx
environment:
- LOG_LEVEL=info
networks:
- elk
- logstash

@@ -0,0 +1,2 @@
api:
environment: production

@@ -0,0 +1,27 @@
input {
file {
path => "/var/log/user-api/log.log"
start_position => "beginning"
tags => [ "api", "user" ]
}
file {
path => "/var/log/auth-api/log.log"
start_position => "beginning"
tags => [ "api", "auth" ]
}
file {
path => "/var/log/nginx/log.log"
start_position => "beginning"
tags => [ "nginx", "front" ]
}
file {
path => "/var/log/nginx/err.log"
start_position => "beginning"
tags => [ "nginx", "front", "error" ]
}
}
output {
elasticsearch { hosts => ["transcendence-elasticsearch:9200"] }
stdout { codec => rubydebug }
}
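The file inputs simply tail whatever the APIs and nginx append to the shared log volumes; the APIs write newline-delimited pino JSON there (see the LOG_FILE_PATH wiring and src/start.js later in this diff). Roughly, one ingested event looks like this — field values are illustrative:

```
// Sketch: the shape of a pino "info" line in /var/log/user-api/log.log.
// Logstash's file input treats each such line as one event.
const line = JSON.stringify({
  level: 30, // pino's numeric level for "info"
  time: Date.now(),
  msg: 'Server listening at http://0.0.0.0:3000',
});
process.stdout.write(line + '\n');
```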

@@ -24,5 +24,7 @@ EXPOSE 3000
RUN mkdir /db
STOPSIGNAL SIGINT
ENV LOG_TARGET=/var/log/log.log
RUN touch /var/log/log.log
CMD [ "node", "/app/src/start.js" ]

@@ -6,6 +6,7 @@ services:
context: ../../
volumes:
- db-user:/db
- log-user:/var/log
networks:
- front
- back
@@ -13,6 +14,7 @@ services:
environment:
- TZ=Europe/Paris
- API_TARGET=user
- LOG_FILE_PATH=/var/log/log.log
- JWT_SECRET=${JWT_SECRET}
restart: unless-stopped
auth-api:
@@ -22,12 +24,17 @@ services:
context: ../../
volumes:
- db-auth:/db
- log-auth:/var/log
networks:
- front
- back
- prom-exporter
environment:
- TZ=Europe/Paris
- GOOGLE_CALLBACK_URL=${GOOGLE_CALLBACK_URL}
- GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID}
- GOOGLE_CLIENT_SECRET=${GOOGLE_CLIENT_SECRET}
- API_TARGET=auth
- LOG_FILE_PATH=/var/log/log.log
- JWT_SECRET=${JWT_SECRET}
restart: unless-stopped

@@ -6,3 +6,4 @@ include:
- ./monitoring/compose.yml
- ./api-base/compose.yml
- ./front/compose.yml
- ./ELK/compose.yml

@@ -25,5 +25,11 @@ COPY --chmod=755 docker/front/entry/ssl-cert.sh /docker-entrypoint.d/ssl-cert.s
COPY --from=builder /app/dist /usr/share/nginx/html
USER root
RUN mkdir -p /var/log/front
RUN touch /var/log/front/err.log /var/log/front/log.log
RUN chmod -R 777 /var/log/front
USER nginx
EXPOSE 80 443
STOPSIGNAL SIGINT

@@ -6,8 +6,9 @@ services:
context: ../../
ports:
- ${OUT_PORT}:443
volumes:
- log-nginx:/var/log/front
environment:
- SERVER_NAME=localhost
- TZ=Europe/Paris
depends_on:
user-api:

@@ -1,4 +1,7 @@
server {
error_log /var/log/front/err.log warn;
access_log /var/log/front/log.log;
listen 443 ssl;
server_name example.com; # Replace with your domain or handle env vars externally

@@ -24,7 +24,7 @@ scrape_configs:
static_configs:
- targets: ['node-exporter:9100']
- job_name: 'nodejs'
static_configs:
- targets: ['transcendence-api-auth:3000']
- targets: ['transcendence-api-user:3000']
# - job_name: 'nodejs'
# static_configs:
# - targets: ['transcendence-api-auth:3000']
# - targets: ['transcendence-api-user:3000']

@@ -7,3 +7,7 @@ networks:
name: transcendence-prom
prom-exporter:
name: transcendence-prom-exporter
elk:
name: transcendence-elk
logstash:
name: transcendence-logstash

@@ -5,3 +5,9 @@ volumes:
name: transcendence-api-auth-db
db-user:
name: transcendence-api-user-db
log-auth:
name: transcendence-api-auth-log
log-user:
name: transcendence-api-user-log
log-nginx:
name: transcendence-front-log

@@ -48,7 +48,7 @@
fi
if [ ! -d lib/ ]; then
echo Installing foundry env
forge i
forge i
fi
export PATH+=:$(pwd)/node_modules/.bin
echo entering ft_trans env

@@ -11,7 +11,7 @@
"ethers": "^6.15.0",
"fastify": "^5.4.0",
"fastify-cli": "^7.4.0",
"google-auth-library": "^10.1.0",
"pino": "^9.7.0",
"prom-client": "^15.1.3",
"solhint": "^6.0.0"
},

pnpm-lock.yaml (generated; 3277 lines changed)

File diff suppressed because it is too large.

@@ -1,6 +1,5 @@
import fastifyJWT from '@fastify/jwt';
import fastifyCookie from '@fastify/cookie';
import client from 'prom-client';
import { register } from './register.js';
import { login } from './login.js';
@@ -14,7 +13,6 @@ import { totpVerify } from './totpVerify.js';
const saltRounds = 10;
export const appName = process.env.APP_NAME || 'knl_meowscendence';
const collectDefaultMetrics = client.collectDefaultMetrics
authDB.prepareDB();
@@ -24,29 +22,6 @@ authDB.prepareDB();
*/
export default async function(fastify, options) {
collectDefaultMetrics({ labels: { service: "auth-api" } })
client.register.setDefaultLabels({ service: "auth-api" })
const httpRequestCounter = new client.Counter({
name: 'http_requests_total',
help: 'Total number of HTTP requests',
labelNames: ['method', 'route', 'status_code'],
})
fastify.addHook('onResponse', (req, res, done) => {
httpRequestCounter.inc({
method: req.method,
route: req.routerPath || req.url,
status_code: res.statusCode,
})
done()
})
fastify.get('/metrics', async (req, reply) => {
reply
.header('Content-Type', client.register.contentType)
.send(await client.register.metrics())
})
fastify.register(fastifyJWT, {
secret: process.env.JWT_SECRET || '123456789101112131415161718192021',
cookie: {

@@ -1,10 +1,8 @@
import fastifyJWT from '@fastify/jwt';
import fastifyCookie from '@fastify/cookie';
import Database from 'better-sqlite3';
import client from 'prom-client';
var env = process.env.NODE_ENV || 'development';
const collectDefaultMetrics = client.collectDefaultMetrics
let database;
@@ -58,31 +56,6 @@ const deleteFriends = database.prepare('DELETE FROM friends WHERE username = ?;'
* @param {import('fastify').FastifyPluginOptions} options
*/
export default async function(fastify, options) {
collectDefaultMetrics({ labels: { service: "auth-api" } })
client.register.setDefaultLabels({ service: "auth-api" })
const httpRequestCounter = new client.Counter({
name: 'http_requests_total',
help: 'Total number of HTTP requests',
labelNames: ['method', 'route', 'status_code'],
})
fastify.addHook('onResponse', (req, res, done) => {
httpRequestCounter.inc({
method: req.method,
route: req.routerPath || req.url,
status_code: res.statusCode,
})
done()
})
fastify.get('/metrics', async (req, reply) => {
reply
.header('Content-Type', client.register.contentType)
.send(await client.register.metrics())
})
fastify.register(fastifyJWT, {
secret: process.env.JWT_SECRET || '123456789101112131415161718192021',
cookie: {
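Both route plugins drop their inline prom-client setup here, while prometheus.yml above still scrapes transcendence-api-user:3000 under the nodejs job. One way the removed code could be re-homed is a single shared plugin that each API registers; this is a sketch under that assumption, and the src/utils/metrics.js path is hypothetical:

```
// Sketch: src/utils/metrics.js (hypothetical path) — the counter, onResponse hook
// and /metrics route removed above, factored into one reusable Fastify plugin.
import client from 'prom-client';

export default async function metricsPlugin(fastify, options) {
  client.collectDefaultMetrics({ labels: { service: options.service } });
  client.register.setDefaultLabels({ service: options.service });

  const httpRequestCounter = new client.Counter({
    name: 'http_requests_total',
    help: 'Total number of HTTP requests',
    labelNames: ['method', 'route', 'status_code'],
  });

  fastify.addHook('onResponse', (req, reply, done) => {
    httpRequestCounter.inc({
      method: req.method,
      route: req.routerPath || req.url,
      status_code: reply.statusCode,
    });
    done();
  });

  fastify.get('/metrics', async (req, reply) => {
    reply
      .header('Content-Type', client.register.contentType)
      .send(await client.register.metrics());
  });
}
```

Each service would then register it with its own label, e.g. fastify.register(metricsPlugin, { service: 'auth-api' }), which also avoids the copy-pasted "auth-api" label the user plugin used to set.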

@@ -2,65 +2,81 @@ import Fastify from 'fastify';
import authApi from './api/auth/default.js';
import userApi from './api/user/default.js';
import scoreApi from './api/scoreStore/default.js';
import fs from 'fs';
import path from 'path';
const loggerOption = {
transport: {
target: 'pino-pretty',
options: {
colorize: true,
translateTime: 'HH:MM:ss',
ignore: 'pid,hostname'
}
}
const isProduction = process.env.NODE_ENV === 'production';
const logFilePath = process.env.LOG_FILE_PATH || './logs/api.log';
const loggerOption = () => {
if (!isProduction) {
return {
transport: {
target: 'pino-pretty',
options: {
colorize: true,
translateTime: 'HH:MM:ss',
ignore: 'pid,hostname',
},
},
};
} else {
// Make sure the directory exists
const logDir = path.dirname(logFilePath);
fs.mkdirSync(logDir, { recursive: true });
const logStream = fs.createWriteStream(logFilePath, { flags: 'a' }); // append mode
return {
level: 'info',
stream: logStream,
};
}
};
function sigHandle(signal) {
process.exit(0);
}
process.on('SIGINT', sigHandle);
async function start() {
const target = process.env.API_TARGET || 'all';
const servers = [];
if (target === 'auth' || target === 'all') {
const auth = Fastify({ logger: loggerOption });
const auth = Fastify({ logger: loggerOption('auth') });
auth.register(authApi);
if (target !== 'all') {
await auth.listen({ port: 3000, host: '0.0.0.0' });
console.log('Auth API listening on http://0.0.0.0:3000');
}
else {
await auth.listen({ port: 3001, host: '127.0.0.1'});
console.log('Auth API listening on http://localhost:3001');
}
const port = target === 'all' ? 3001 : 3000;
const host = target === 'all' ? '127.0.0.1' : '0.0.0.0';
await auth.listen({ port, host });
console.log(`Auth API listening on http://${host}:${port}`);
servers.push(auth);
}
if (target === 'user' || target === 'all') {
const user = Fastify({ logger: loggerOption });
const user = Fastify({ logger: loggerOption('user') });
user.register(userApi);
if (target !== 'all') {
await user.listen({ port: 3000, host: '0.0.0.0' });
console.log('User API listening on http://0.0.0.0:3000');
}
else {
await user.listen({ port: 3002, host: '127.0.0.1'});
console.log('User API listening on http://localhost:3002');
}
const port = target === 'all' ? 3002 : 3000;
const host = target === 'all' ? '127.0.0.1' : '0.0.0.0';
await user.listen({ port, host });
console.log(`User API listening on http://${host}:${port}`);
servers.push(user);
}
if (target === 'scoreStore' || target === 'all') {
const score = Fastify({ logger: loggerOption('scoreStore') });
score.register(scoreApi);
const port = target === 'all' ? 3002 : 3000;
const host = target === 'all' ? '127.0.0.1' : '0.0.0.0';
await score.listen({ port, host });
console.log(`ScoreStore API listening on http://${host}:${port}`);
servers.push(score);
}
if (target === 'scoreStore' || target === 'all') {
const scoreStore = Fastify({ logger: loggerOption });
scoreStore.register(scoreApi);
if (target !== 'all') {
await scoreStore.listen({ port: 3000, host: '0.0.0.0' });
console.log('scoreStore API listening on http://0.0.0.0:3000');
}
else {
await scoreStore.listen({ port: 3002, host: '127.0.0.1'});
console.log('scoreStore API listening on http://localhost:3002');
}
}
// Graceful shutdown on SIGINT
process.on('SIGINT', async () => {
console.log('SIGINT received, closing servers...');
await Promise.all(servers.map((srv) => srv.close()));
process.exit(0);
});
}
start().catch(console.error);
start().catch((err) => {
console.error(err);
process.exit(1);
});