Mirror of https://github.com/OneUptime/oneuptime.git (synced 2026-01-11 19:56:44 +00:00)
Rename Ingestor to ProbeIngest; update configurations, routes, and Docker support; add new request types and workflows
Commit 815ae7161d (parent 3a1f5c7120)
46 changed files with 127 additions and 127 deletions
.github/workflows/build.yml (vendored, 4 changed lines)

@@ -240,7 +240,7 @@ jobs:
  - name: build docker image
  run: sudo docker build -f ./Probe/Dockerfile .

- docker-build-ingestor:
+ docker-build-probe-ingest:
  runs-on: ubuntu-latest
  env:
  CI_PIPELINE_ID: ${{github.run_number}}

@@ -253,7 +253,7 @@ jobs:
  # build image probe api
  - name: build docker image
- run: sudo docker build -f ./Ingestor/Dockerfile .
+ run: sudo docker build -f ./ProbeIngest/Dockerfile .

  docker-build-open-telemetry-ingest:
  runs-on: ubuntu-latest
.github/workflows/compile.yml (vendored, 4 changed lines)

@@ -204,7 +204,7 @@ jobs:
  - run: cd Common && npm install
  - run: cd Probe && npm install && npm run compile && npm run dep-check

- compile-ingestor:
+ compile-probe-ingest:
  runs-on: ubuntu-latest
  env:
  CI_PIPELINE_ID: ${{github.run_number}}

@@ -214,7 +214,7 @@ jobs:
  with:
  node-version: 18.3.0
  - run: cd Common && npm install
- - run: cd Ingestor && npm install && npm run compile && npm run dep-check
+ - run: cd ProbeIngest && npm install && npm run compile && npm run dep-check

  compile-open-telemetry-ingest:
  runs-on: ubuntu-latest
.github/workflows/release.yml (vendored, 14 changed lines)

@@ -553,7 +553,7 @@ jobs:
  GIT_SHA=${{ github.sha }}
  APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}

- ingestor-docker-image-deploy:
+ probe-ingest-docker-image-deploy:
  needs: [generate-build-number]
  runs-on: ubuntu-latest
  steps:

@@ -562,8 +562,8 @@ jobs:
  uses: docker/metadata-action@v4
  with:
  images: |
- oneuptime/ingestor
- ghcr.io/oneuptime/ingestor
+ oneuptime/probe-ingest
+ ghcr.io/oneuptime/probe-ingest
  tags: |
  type=raw,value=release,enable=true
  type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true

@@ -585,7 +585,7 @@ jobs:
  - name: Generate Dockerfile from Dockerfile.tpl
  run: npm run prerun

- # Build and deploy ingestor.
+ # Build and deploy probe-ingest.

  - name: Login to Docker Hub
  uses: docker/login-action@v2.2.0

@@ -603,7 +603,7 @@ jobs:
  - name: Build and push
  uses: docker/build-push-action@v4
  with:
- file: ./Ingestor/Dockerfile
+ file: ./ProbeIngest/Dockerfile
  context: .
  platforms: linux/amd64,linux/arm64
  push: true

@@ -1590,7 +1590,7 @@ jobs:

  test-e2e-release-saas:
  runs-on: ubuntu-latest
- needs: [open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, fluent-ingest-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, ingestor-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy, incoming-request-ingest-docker-image-deploy]
+ needs: [open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, fluent-ingest-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, probe-ingest-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy, incoming-request-ingest-docker-image-deploy]
  env:
  CI_PIPELINE_ID: ${{github.run_number}}
  steps:

@@ -1643,7 +1643,7 @@ jobs:
  test-e2e-release-self-hosted:
  runs-on: ubuntu-latest
  # After all the jobs runs
- needs: [open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, incoming-request-ingest-docker-image-deploy, fluent-ingest-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, ingestor-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy]
+ needs: [open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, incoming-request-ingest-docker-image-deploy, fluent-ingest-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, probe-ingest-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy]
  env:
  CI_PIPELINE_ID: ${{github.run_number}}
  steps:
.github/workflows/test-release.yaml (vendored, 24 changed lines)

@@ -599,7 +599,7 @@ jobs:
  GIT_SHA=${{ github.sha }}
  APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}

- ingestor-docker-image-deploy:
+ probe-ingest-docker-image-deploy:
  needs: generate-build-number
  runs-on: ubuntu-latest
  steps:

@@ -608,8 +608,8 @@ jobs:
  uses: docker/metadata-action@v4
  with:
  images: |
- oneuptime/ingestor
- ghcr.io/oneuptime/ingestor
+ oneuptime/probe-ingest
+ ghcr.io/oneuptime/probe-ingest
  tags: |
  type=raw,value=test,enable=true
  type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true

@@ -632,7 +632,7 @@ jobs:
  - name: Generate Dockerfile from Dockerfile.tpl
  run: npm run prerun

- # Build and deploy ingestor.
+ # Build and deploy probe-ingest.

  - name: Login to Docker Hub
  uses: docker/login-action@v2.2.0

@@ -650,7 +650,7 @@ jobs:
  - name: Build and push
  uses: docker/build-push-action@v4
  with:
- file: ./Ingestor/Dockerfile
+ file: ./ProbeIngest/Dockerfile
  context: .
  platforms: linux/amd64,linux/arm64
  push: true

@@ -721,7 +721,7 @@ jobs:
  GIT_SHA=${{ github.sha }}
  APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}

- ingestor-docker-image-deploy:
+ probe-ingest-docker-image-deploy:
  needs: generate-build-number
  runs-on: ubuntu-latest
  steps:

@@ -730,8 +730,8 @@ jobs:
  uses: docker/metadata-action@v4
  with:
  images: |
- oneuptime/ingestor
- ghcr.io/oneuptime/ingestor
+ oneuptime/probe-ingest
+ ghcr.io/oneuptime/probe-ingest
  tags: |
  type=raw,value=test,enable=true
  type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true

@@ -754,7 +754,7 @@ jobs:
  - name: Generate Dockerfile from Dockerfile.tpl
  run: npm run prerun

- # Build and deploy ingestor.
+ # Build and deploy probe-ingest.

  - name: Login to Docker Hub
  uses: docker/login-action@v2.2.0

@@ -772,7 +772,7 @@ jobs:
  - name: Build and push
  uses: docker/build-push-action@v4
  with:
- file: ./Ingestor/Dockerfile
+ file: ./ProbeIngest/Dockerfile
  context: .
  platforms: linux/amd64,linux/arm64
  push: true

@@ -876,7 +876,7 @@ jobs:
  - name: Generate Dockerfile from Dockerfile.tpl
  run: npm run prerun

- # Build and deploy ingestor.
+ # Build and deploy probe-ingest.

  - name: Login to Docker Hub
  uses: docker/login-action@v2.2.0

@@ -1587,7 +1587,7 @@ jobs:

  test-helm-chart:
  runs-on: ubuntu-latest
- needs: [llm-docker-image-deploy, copilot-docker-image-deploy, docs-docker-image-deploy, worker-docker-image-deploy, workflow-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, api-reference-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, ingestor-docker-image-deploy, probe-docker-image-deploy, haraka-docker-image-deploy, dashboard-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, accounts-docker-image-deploy, otel-collector-docker-image-deploy, status-page-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy, fluent-ingest-docker-image-deploy, incoming-request-ingest-docker-image-deploy]
+ needs: [llm-docker-image-deploy, copilot-docker-image-deploy, docs-docker-image-deploy, worker-docker-image-deploy, workflow-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, api-reference-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, probe-ingest-docker-image-deploy, probe-docker-image-deploy, haraka-docker-image-deploy, dashboard-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, accounts-docker-image-deploy, otel-collector-docker-image-deploy, status-page-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy, fluent-ingest-docker-image-deploy, incoming-request-ingest-docker-image-deploy]
  env:
  CI_PIPELINE_ID: ${{github.run_number}}
  steps:
(file name not shown)

@@ -1,4 +1,4 @@
- name: Ingestor Test
+ name: ProbeIngest Test

  on:
  pull_request:

@@ -17,5 +17,5 @@ jobs:
  - uses: actions/setup-node@v2
  with:
  node-version: 18.3.0
- - run: cd Ingestor && npm install && npm run test
+ - run: cd ProbeIngest && npm install && npm run test
.vscode/launch.json (vendored, 4 changed lines)

@@ -163,8 +163,8 @@
  },
  {
  "address": "127.0.0.1",
- "localRoot": "${workspaceFolder}/Ingestor",
- "name": "Ingestor: Debug with Docker",
+ "localRoot": "${workspaceFolder}/ProbeIngest",
+ "name": "ProbeIngest: Debug with Docker",
  "port": 9932,
  "remoteRoot": "/usr/src/app",
  "request": "attach",
(file name not shown)

@@ -88,9 +88,9 @@ export const AppApiHostname: Hostname = Hostname.fromString(
  }`,
  );

- export const IngestorHostname: Hostname = Hostname.fromString(
- `${process.env["SERVER_INGESTOR_HOSTNAME"] || "localhost"}:${
- process.env["INGESTOR_PORT"] || 80
+ export const ProbeIngestHostname: Hostname = Hostname.fromString(
+ `${process.env["SERVER_PROBE_INGEST_HOSTNAME"] || "localhost"}:${
+ process.env["PROBE_INGEST_PORT"] || 80
  }`,
  );
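The renamed hostname is assembled from two environment variables with local defaults. A minimal standalone sketch of the same resolution logic, using plain strings rather than the project's Hostname type (names and defaults mirror the hunk above):

  // Sketch only: resolves the probe-ingest hostname the same way the
  // template literal above does, defaulting to localhost:80.
  function resolveProbeIngestHostname(env: NodeJS.ProcessEnv): string {
    const host: string = env["SERVER_PROBE_INGEST_HOSTNAME"] || "localhost";
    const port: string = env["PROBE_INGEST_PORT"] || "80";
    return `${host}:${port}`;
  }

  // e.g. "probe-ingest:3400" under Docker Compose, "localhost:80" when unset.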
(file name not shown)

@@ -26,7 +26,7 @@ export const ApiReferenceRoute: Route = new Route("/reference");

  export const AdminDashboardRoute: Route = new Route("/admin");

- export const IngestorRoute: Route = new Route("/ingestor");
+ export const ProbeIngestRoute: Route = new Route("/probe-ingest");

  export const OpenTelemetryIngestRoute: Route = new Route(
  "/open-telemetry-ingest",
(file name not shown)

@@ -8,7 +8,7 @@ import {
  FileRoute,
  HomeRoute,
  IdentityRoute,
- IngestorRoute,
+ ProbeIngestRoute,
  IntegrationRoute,
  NotificationRoute,
  RealtimeRoute,

@@ -68,7 +68,7 @@ export const STATUS_PAGE_HOSTNAME: Hostname = Hostname.fromString(HOST);

  export const WORKFLOW_HOSTNAME: Hostname = Hostname.fromString(HOST);

- export const INGESTOR_HOSTNAME: Hostname = Hostname.fromString(HOST);
+ export const PROBE_INGEST_HOSTNAME: Hostname = Hostname.fromString(HOST);

  export const OPEN_TELEMETRY_INGEST_HOSTNAME: Hostname =
  Hostname.fromString(HOST);

@@ -142,10 +142,10 @@ export const WORKFLOW_URL: URL = new URL(
  WorkflowRoute,
  );

- export const INGESTOR_URL: URL = new URL(
+ export const PROBE_INGEST_URL: URL = new URL(
  HTTP_PROTOCOL,
- INGESTOR_HOSTNAME,
- IngestorRoute,
+ PROBE_INGEST_HOSTNAME,
+ ProbeIngestRoute,
  );

  export const INCOMING_REQUEST_INGEST_URL: URL = new URL(
(file name not shown)

@@ -6,7 +6,7 @@ test.describe("check live and health check of the app", () => {
  test("check if app status is ok", async ({ page }: { page: Page }) => {
  await page.goto(
  `${URL.fromString(BASE_URL.toString())
- .addRoute("/ingestor/status")
+ .addRoute("/probe-ingest/status")
  .toString()}`,
  );
  const content: string = await page.content();

@@ -16,7 +16,7 @@ test.describe("check live and health check of the app", () => {
  test("check if app is ready", async ({ page }: { page: Page }) => {
  await page.goto(
  `${URL.fromString(BASE_URL.toString())
- .addRoute("/ingestor/status/ready")
+ .addRoute("/probe-ingest/status/ready")
  .toString()}`,
  );
  const content: string = await page.content();

@@ -26,7 +26,7 @@ test.describe("check live and health check of the app", () => {
  test("check if app is live", async ({ page }: { page: Page }) => {
  await page.goto(
  `${URL.fromString(BASE_URL.toString())
- .addRoute("/ingestor/status/live")
+ .addRoute("/probe-ingest/status/live")
  .toString()}`,
  );
  const content: string = await page.content();
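For reference, this is the shape of one of these checks as a self-contained Playwright sketch. BASE_URL here is a hypothetical environment variable, the project's own URL helper is replaced with plain string concatenation, and the assertion on the page content is an assumption (the diff does not show it):

  import { test, expect, Page } from "@playwright/test";

  // Hypothetical base URL; the real suite builds it from the project's BASE_URL config.
  const BASE_URL: string = process.env["E2E_BASE_URL"] || "http://localhost";

  test("check if app status is ok", async ({ page }: { page: Page }) => {
    // The renamed health-check path: /ingestor/status is now /probe-ingest/status.
    await page.goto(`${BASE_URL}/probe-ingest/status`);
    const content: string = await page.content();
    // Placeholder assertion; the real test's expectation is not part of this hunk.
    expect(content.length).toBeGreaterThan(0);
  });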
(file name not shown)

@@ -46,7 +46,7 @@ router.post(
  next: NextFunction,
  ): Promise<void> => {
  try {
- logger.debug("Fluent Ingestor API called");
+ logger.debug("Fluent ProbeIngest API called");

  const dbLogs: Array<Log> = [];
(file name not shown)

@@ -58,8 +58,8 @@ Usage:
  value: {{ $.Release.Name }}-home.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}
  - name: SERVER_APP_HOSTNAME
  value: {{ $.Release.Name }}-app.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}
- - name: SERVER_INGESTOR_HOSTNAME
- value: {{ $.Release.Name }}-ingestor.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}
+ - name: SERVER_PROBE_INGEST_HOSTNAME
+ value: {{ $.Release.Name }}-probe-ingest.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}
  - name: OPEN_TELEMETRY_INGEST_HOSTNAME
  value: {{ $.Release.Name }}-open-telemetry-ingest.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}
  - name: SERVER_INCOMING_REQUEST_INGEST_HOSTNAME

@@ -81,8 +81,8 @@ Usage:

  - name: APP_PORT
  value: {{ $.Values.port.app | squote }}
- - name: INGESTOR_PORT
- value: {{ $.Values.port.ingestor | squote }}
+ - name: PROBE_INGEST_PORT
+ value: {{ $.Values.port.probeIngest | squote }}
  - name: OPEN_TELEMETRY_INGEST_PORT
  value: {{ $.Values.port.openTelemetryIngest | squote }}
  - name: INCOMING_REQUEST_INGEST_PORT
(file name not shown)

@@ -1,12 +1,12 @@
- # OneUptime ingestor Deployment
+ # OneUptime probe-ingest Deployment

  apiVersion: apps/v1
  kind: Deployment
  metadata:
- name: {{ printf "%s-%s" $.Release.Name "ingestor" }}
+ name: {{ printf "%s-%s" $.Release.Name "probe-ingest" }}
  namespace: {{ $.Release.Namespace }}
  labels:
- app: {{ printf "%s-%s" $.Release.Name "ingestor" }}
+ app: {{ printf "%s-%s" $.Release.Name "probe-ingest" }}
  app.kubernetes.io/part-of: oneuptime
  app.kubernetes.io/managed-by: Helm
  appname: oneuptime

@@ -14,16 +14,16 @@ metadata:
  spec:
  selector:
  matchLabels:
- app: {{ printf "%s-%s" $.Release.Name "ingestor" }}
- {{- if $.Values.deployment.ingestor.replicaCount }}
- replicas: {{ $.Values.deployment.ingestor.replicaCount }}
+ app: {{ printf "%s-%s" $.Release.Name "probe-ingest" }}
+ {{- if $.Values.deployment.probe-ingest.replicaCount }}
+ replicas: {{ $.Values.deployment.probe-ingest.replicaCount }}
  {{- else }}
  replicas: {{ $.Values.deployment.replicaCount }}
  {{- end }}
  template:
  metadata:
  labels:
- app: {{ printf "%s-%s" $.Release.Name "ingestor" }}
+ app: {{ printf "%s-%s" $.Release.Name "probe-ingest" }}
  date: "{{ now | unixEpoch }}"
  appname: oneuptime
  spec:

@@ -48,14 +48,14 @@ spec:
  nodeSelector: {{- $.Values.nodeSelector | toYaml | nindent 8 }}
  {{- end }}
  containers:
- - image: {{ printf "%s/%s/%s:%s" $.Values.image.registry $.Values.image.repository "ingestor" $.Values.image.tag }}
- name: {{ printf "%s-%s" $.Release.Name "ingestor" }}
+ - image: {{ printf "%s/%s/%s:%s" $.Values.image.registry $.Values.image.repository "probe-ingest" $.Values.image.tag }}
+ name: {{ printf "%s-%s" $.Release.Name "probe-ingest" }}
  {{- if $.Values.startupProbe.enabled }}
  # Startup probe
  startupProbe:
  httpGet:
  path: /status/live
- port: {{ $.Values.port.ingestor }}
+ port: {{ $.Values.port.probeIngest }}
  periodSeconds: {{ $.Values.startupProbe.periodSeconds }}
  failureThreshold: {{ $.Values.startupProbe.failureThreshold }}
  {{- end }}

@@ -64,7 +64,7 @@ spec:
  livenessProbe:
  httpGet:
  path: /status/live
- port: {{ $.Values.port.ingestor }}
+ port: {{ $.Values.port.probeIngest }}
  periodSeconds: {{ $.Values.livenessProbe.periodSeconds }}
  timeoutSeconds: {{ $.Values.livenessProbe.timeoutSeconds }}
  initialDelaySeconds: {{ $.Values.livenessProbe.initialDelaySeconds }}

@@ -74,7 +74,7 @@ spec:
  readinessProbe:
  httpGet:
  path: /status/ready
- port: {{ $.Values.port.ingestor }}
+ port: {{ $.Values.port.probeIngest }}
  periodSeconds: {{ $.Values.readinessProbe.periodSeconds }}
  initialDelaySeconds: {{ $.Values.readinessProbe.initialDelaySeconds }}
  timeoutSeconds: {{ $.Values.readinessProbe.timeoutSeconds }}

@@ -90,22 +90,22 @@ spec:
  - name: OPENTELEMETRY_EXPORTER_OTLP_HEADERS
  value: {{ $.Values.openTelemetryExporter.headers }}
  - name: PORT
- value: {{ $.Values.port.ingestor | quote }}
+ value: {{ $.Values.port.probeIngest | quote }}
  ports:
- - containerPort: {{ $.Values.port.ingestor }}
+ - containerPort: {{ $.Values.port.probeIngest }}
  protocol: TCP
  name: http
  restartPolicy: {{ $.Values.image.restartPolicy }}

  ---

- # OneUptime ingestor Service
- {{- $ingestorPorts := dict "port" $.Values.port.ingestor -}}
- {{- $ingestorServiceArgs := dict "ServiceName" "ingestor" "Ports" $ingestorPorts "Release" $.Release "Values" $.Values -}}
- {{- include "oneuptime.service" $ingestorServiceArgs }}
+ # OneUptime probe-ingest Service
+ {{- $probe-ingestPorts := dict "port" $.Values.port.probeIngest -}}
+ {{- $probe-ingestServiceArgs := dict "ServiceName" "probe-ingest" "Ports" $probe-ingestPorts "Release" $.Release "Values" $.Values -}}
+ {{- include "oneuptime.service" $probe-ingestServiceArgs }}
  ---

- # OneUptime ingestor autoscaler
- {{- $ingestorAutoScalerArgs := dict "ServiceName" "ingestor" "Release" $.Release "Values" $.Values -}}
- {{- include "oneuptime.autoscaler" $ingestorAutoScalerArgs }}
+ # OneUptime probe-ingest autoscaler
+ {{- $probe-ingestAutoScalerArgs := dict "ServiceName" "probe-ingest" "Release" $.Release "Values" $.Values -}}
+ {{- include "oneuptime.autoscaler" $probe-ingestAutoScalerArgs }}
  ---
(file name not shown)

@@ -57,7 +57,7 @@ spec:
  - name: OPENTELEMETRY_EXPORTER_OTLP_ENDPOINT
  value: {{ $.Values.openTelemetryExporter.endpoint }}
  - name: ONEUPTIME_URL
- value: http://{{ $.Release.Name }}-ingestor.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}:{{ $.Values.port.ingestor }}
+ value: http://{{ $.Release.Name }}-probe-ingest.{{ $.Release.Namespace }}.svc.{{ $.Values.global.clusterDomain }}:{{ $.Values.port.probe-ingest }}
  - name: PROBE_NAME
  value: {{ $val.name }}
  - name: PROBE_DESCRIPTION
(file name not shown)

@@ -30,7 +30,7 @@ fluentdHost:
  deployment:
  # Default replica count for all deployments
  replicaCount: 1
- ingestor:
+ probeIngest:
  replicaCount:

  metalLb:

@@ -203,7 +203,7 @@ probes:

  port:
  app: 3002
- ingestor: 3400
+ probeIngest: 3400
  openTelemetryIngest: 3403
  fluentIngest: 3401
  incomingRequestIngest: 3402
(file name not shown)

@@ -6,8 +6,8 @@ upstream app {
  server ${SERVER_APP_HOSTNAME}:${APP_PORT} weight=10 max_fails=3 fail_timeout=30s;
  }

- upstream ingestor {
- server ${SERVER_INGESTOR_HOSTNAME}:${INGESTOR_PORT} weight=10 max_fails=3 fail_timeout=30s;
+ upstream probe-ingest {
+ server ${SERVER_PROBE_INGEST_HOSTNAME}:${PROBE_INGEST_PORT} weight=10 max_fails=3 fail_timeout=30s;
  }

  upstream open-telemetry-ingest {

@@ -546,7 +546,7 @@ server {
  proxy_pass http://fluent-ingest/fluentd/v1/logs;
  }

- location /ingestor {
+ location /probe-ingest {
  # This is for nginx not to crash when service is not available.
  resolver 127.0.0.1 valid=30s;
  proxy_set_header Host $host;

@@ -558,7 +558,7 @@ server {
  proxy_http_version 1.1;
  proxy_set_header Upgrade $http_upgrade;
  proxy_set_header Connection "upgrade";
- proxy_pass http://ingestor;
+ proxy_pass http://probe-ingest;

  client_max_body_size 50M;
  }

@@ -575,7 +575,7 @@ server {
  proxy_http_version 1.1;
  proxy_set_header Upgrade $http_upgrade;
  proxy_set_header Connection "upgrade";
- proxy_pass http://ingestor/server-monitor;
+ proxy_pass http://probe-ingest/server-monitor;

  client_max_body_size 50M;
  }
OpenTelemetryIngest/package-lock.json (generated, 4 changed lines)

@@ -1,11 +1,11 @@
  {
- "name": "@oneuptime/ingestor",
+ "name": "@oneuptime/probe-ingest",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
  "": {
- "name": "@oneuptime/ingestor",
+ "name": "@oneuptime/probe-ingest",
  "version": "1.0.0",
  "license": "Apache-2.0",
  "dependencies": {
(file name not shown)

@@ -1,5 +1,5 @@
  {
- "name": "@oneuptime/ingestor",
+ "name": "@oneuptime/probe-ingest",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
(file name not shown)

@@ -2,23 +2,23 @@ import URL from "Common/Types/API/URL";
  import ObjectID from "Common/Types/ObjectID";
  import logger from "Common/Server/Utils/Logger";

- if (!process.env["INGESTOR_URL"] && !process.env["ONEUPTIME_URL"]) {
- logger.error("INGESTOR_URL or ONEUPTIME_URL is not set");
+ if (!process.env["PROBE_INGEST_URL"] && !process.env["ONEUPTIME_URL"]) {
+ logger.error("PROBE_INGEST_URL or ONEUPTIME_URL is not set");
  process.exit();
  }

- export let INGESTOR_URL: URL = URL.fromString(
+ export let PROBE_INGEST_URL: URL = URL.fromString(
  process.env["ONEUPTIME_URL"] ||
- process.env["INGESTOR_URL"] ||
+ process.env["PROBE_INGEST_URL"] ||
  "https://oneuptime.com",
  );

  // If probe api does not have the path. Add it.
  if (
- !INGESTOR_URL.toString().endsWith("ingestor") &&
- !INGESTOR_URL.toString().endsWith("ingestor/")
+ !PROBE_INGEST_URL.toString().endsWith("probe-ingest") &&
+ !PROBE_INGEST_URL.toString().endsWith("probe-ingest/")
  ) {
- INGESTOR_URL = URL.fromString(INGESTOR_URL.addRoute("/ingestor").toString());
+ PROBE_INGEST_URL = URL.fromString(PROBE_INGEST_URL.addRoute("/probe-ingest").toString());
  }

  export const PROBE_NAME: string | null = process.env["PROBE_NAME"] || null;
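The net effect of this block: whichever of ONEUPTIME_URL or PROBE_INGEST_URL is set becomes the base, and the /probe-ingest path is appended unless the URL already ends with it. A standalone sketch of that fallback-and-suffix logic, using plain strings instead of the project's URL class:

  // Sketch only: mirrors the fallback chain and path normalization above.
  function resolveProbeIngestUrl(env: NodeJS.ProcessEnv): string {
    let url: string =
      env["ONEUPTIME_URL"] || env["PROBE_INGEST_URL"] || "https://oneuptime.com";
    // If the probe api path is missing, add it.
    if (!url.endsWith("probe-ingest") && !url.endsWith("probe-ingest/")) {
      url = url.replace(/\/+$/, "") + "/probe-ingest";
    }
    return url;
  }

  // resolveProbeIngestUrl({ ONEUPTIME_URL: "https://oneuptime.com" })
  //   returns "https://oneuptime.com/probe-ingest".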
(file name not shown)

@@ -1,4 +1,4 @@
- import { INGESTOR_URL } from "../Config";
+ import { PROBE_INGEST_URL } from "../Config";
  import Register from "../Services/Register";
  import ProbeAPIRequest from "../Utils/ProbeAPIRequest";
  import URL from "Common/Types/API/URL";

@@ -33,7 +33,7 @@ BasicCron({
  logger.debug("Probe ID: " + probeId.toString());

  await API.post(
- URL.fromString(INGESTOR_URL.toString()).addRoute("/alive"),
+ URL.fromString(PROBE_INGEST_URL.toString()).addRoute("/alive"),
  ProbeAPIRequest.getDefaultRequestBody(),
  );
  },
(file name not shown)

@@ -1,4 +1,4 @@
- import { INGESTOR_URL, PROBE_MONITOR_FETCH_LIMIT } from "../../Config";
+ import { PROBE_INGEST_URL, PROBE_MONITOR_FETCH_LIMIT } from "../../Config";
  import MonitorUtil from "../../Utils/Monitors/Monitor";
  import ProbeAPIRequest from "../../Utils/ProbeAPIRequest";
  import BaseModel from "Common/Models/DatabaseModels/DatabaseBaseModel/DatabaseBaseModel";

@@ -60,7 +60,7 @@ export default class FetchListAndProbe {
  logger.debug("Fetching monitor list");

  const monitorListUrl: URL = URL.fromString(
- INGESTOR_URL.toString(),
+ PROBE_INGEST_URL.toString(),
  ).addRoute("/monitor/list");

  const result: HTTPResponse<JSONArray> | HTTPErrorResponse =
(file name not shown)

@@ -1,4 +1,4 @@
- import { INGESTOR_URL, PROBE_MONITOR_FETCH_LIMIT } from "../../Config";
+ import { PROBE_INGEST_URL, PROBE_MONITOR_FETCH_LIMIT } from "../../Config";
  import MonitorUtil from "../../Utils/Monitors/Monitor";
  import ProbeAPIRequest from "../../Utils/ProbeAPIRequest";
  import BaseModel from "Common/Models/DatabaseModels/DatabaseBaseModel/DatabaseBaseModel";

@@ -49,7 +49,7 @@ export default class FetchMonitorTestAndProbe {
  logger.debug("Fetching monitor list");

  const monitorListUrl: URL = URL.fromString(
- INGESTOR_URL.toString(),
+ PROBE_INGEST_URL.toString(),
  ).addRoute("/monitor-test/list");

  const result: HTTPResponse<JSONArray> | HTTPErrorResponse =
(file name not shown)

@@ -1,6 +1,6 @@
  import {
  HOSTNAME,
- INGESTOR_URL,
+ PROBE_INGEST_URL,
  PROBE_DESCRIPTION,
  PROBE_ID,
  PROBE_KEY,

@@ -74,7 +74,7 @@ export default class Register {

  await API.fetch<JSONObject>(
  HTTPMethod.POST,
- URL.fromString(INGESTOR_URL.toString()).addRoute(
+ URL.fromString(PROBE_INGEST_URL.toString()).addRoute(
  "/probe/status-report/offline",
  ),
  {

@@ -116,7 +116,7 @@ export default class Register {
  private static async _registerProbe(): Promise<void> {
  if (HasClusterKey) {
  const probeRegistrationUrl: URL = URL.fromString(
- INGESTOR_URL.toString(),
+ PROBE_INGEST_URL.toString(),
  ).addRoute("/register");

  logger.debug("Registering Probe...");

@@ -147,7 +147,7 @@ export default class Register {
  }

  await API.post(
- URL.fromString(INGESTOR_URL.toString()).addRoute("/alive"),
+ URL.fromString(PROBE_INGEST_URL.toString()).addRoute("/alive"),
  {
  probeKey: PROBE_KEY.toString(),
  probeId: PROBE_ID.toString(),
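Taken together with the Config change above, the probe's keep-alive is now a POST to the probe-ingest base URL plus /alive, carrying the probe's credentials. A minimal sketch using the built-in fetch of Node 18+ in place of the project's API.post helper; the response handling here is an assumption:

  // Sketch only: sends the keep-alive ping shown in the hunk above.
  async function sendAlivePing(
    probeIngestUrl: string, // e.g. "http://probe-ingest:3400/probe-ingest"
    probeId: string,
    probeKey: string,
  ): Promise<void> {
    const response: Response = await fetch(`${probeIngestUrl}/alive`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ probeId, probeKey }),
    });
    if (!response.ok) {
      throw new Error(`Alive ping failed with HTTP ${response.status}`);
    }
  }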
(file name not shown)

@@ -1,4 +1,4 @@
- import { INGESTOR_URL } from "../../Config";
+ import { PROBE_INGEST_URL } from "../../Config";
  import ProbeUtil from "../Probe";
  import ProbeAPIRequest from "../ProbeAPIRequest";
  import ApiMonitor, { APIResponse } from "./MonitorTypes/ApiMonitor";

@@ -64,7 +64,7 @@ export default class MonitorUtil {

  await API.fetch<JSONObject>(
  HTTPMethod.POST,
- URL.fromString(INGESTOR_URL.toString()).addRoute(
+ URL.fromString(PROBE_INGEST_URL.toString()).addRoute(
  "/probe/response/monitor-test-ingest/" + monitorTest.id?.toString(),
  ),
  {

@@ -112,7 +112,7 @@ export default class MonitorUtil {

  await API.fetch<JSONObject>(
  HTTPMethod.POST,
- URL.fromString(INGESTOR_URL.toString()).addRoute(
+ URL.fromString(PROBE_INGEST_URL.toString()).addRoute(
  "/probe/response/ingest",
  ),
  {
Ingestor/.gitignore → ProbeIngest/.gitignore (vendored, renamed, 0 changed lines)
(file name not shown)

@@ -1,5 +1,5 @@
  #
- # OneUptime-Ingestor Dockerfile
+ # OneUptime-ProbeIngest Dockerfile
  #

  # Pull base image nodejs image.

@@ -62,11 +62,11 @@ WORKDIR /usr/src/app
  RUN npx playwright install --with-deps

  # Install app dependencies
- COPY ./Ingestor/package*.json /usr/src/app/
+ COPY ./ProbeIngest/package*.json /usr/src/app/
  RUN npm install

  # Expose ports.
- # - 3400: OneUptime-ingestor
+ # - 3400: OneUptime-probe-ingest
  EXPOSE 3400

  {{ if eq .Env.ENVIRONMENT "development" }}

@@ -74,7 +74,7 @@ EXPOSE 3400
  CMD [ "npm", "run", "dev" ]
  {{ else }}
  # Copy app source
- COPY ./Ingestor /usr/src/app
+ COPY ./ProbeIngest /usr/src/app
  # Bundle app source
  RUN npm run compile
  #Run the app
(file name not shown)

@@ -1,7 +1,7 @@
  import IncomingRequestAPI from "../IncomingRequestIngest/API/IncomingRequest";
  import MonitorAPI from "./API/Monitor";
  import OTelIngestAPI from "./API/OTelIngest";
- import Ingestor from "./API/Probe";
+ import ProbeIngest from "./API/Probe";
  import RegisterAPI from "./API/Register";
  import ServerMonitorAPI from "./API/ServerMonitor";
  import { PromiseVoidFunction } from "Common/Types/FunctionTypes";

@@ -18,11 +18,11 @@ import "ejs";

  const app: ExpressApplication = Express.getExpressApp();

- const APP_NAME: string = "ingestor";
+ const APP_NAME: string = "probe-ingest";

  app.use([`/${APP_NAME}`, "/"], RegisterAPI);
  app.use([`/${APP_NAME}`, "/"], MonitorAPI);
- app.use([`/${APP_NAME}`, "/"], Ingestor);
+ app.use([`/${APP_NAME}`, "/"], ProbeIngest);
  app.use([`/${APP_NAME}`, "/"], IncomingRequestAPI);
  app.use([`/${APP_NAME}`, "/"], OTelIngestAPI);
  app.use([`/${APP_NAME}`, "/"], ServerMonitorAPI);
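The mounting pattern above is what lets the same routers answer both behind nginx (which forwards /probe-ingest/... with the prefix intact) and when the container is addressed directly at its root. A self-contained sketch of that pattern with a stand-in router; the handler here is a placeholder, not the project's actual status route:

  import express, { Express, Request, Response, Router } from "express";

  const app: Express = express();
  const APP_NAME: string = "probe-ingest";

  // Stand-in router; the real service mounts its Register, Monitor, ProbeIngest,
  // IncomingRequest, OTelIngest and ServerMonitor routers the same way.
  const statusRouter: Router = express.Router();
  statusRouter.get("/status/live", (_req: Request, res: Response) => {
    res.json({ status: "ok" });
  });

  // Mounting under both the prefixed path and "/" means that
  // GET /probe-ingest/status/live and GET /status/live both resolve.
  app.use([`/${APP_NAME}`, "/"], statusRouter);

  app.listen(3400);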
(file name not shown)

@@ -1,11 +1,11 @@
  {
- "name": "@oneuptime/ingestor",
+ "name": "@oneuptime/probe-ingest",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
  "": {
- "name": "@oneuptime/ingestor",
+ "name": "@oneuptime/probe-ingest",
  "version": "1.0.0",
  "license": "Apache-2.0",
  "dependencies": {
(file name not shown)

@@ -1,5 +1,5 @@
  {
- "name": "@oneuptime/ingestor",
+ "name": "@oneuptime/probe-ingest",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
(file name not shown)

@@ -39,7 +39,7 @@
  // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
  // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
  "typeRoots": [
- "./node_modules/@types"
+ "node_modules/@types"
  ] /* Specify multiple folders that act like `./node_modules/@types`. */,
  "types": [
  "node",
(file name not shown)

@@ -49,11 +49,11 @@ bash $scriptDir/endpoint-status.sh "Admin Dashboard (Status Check)" $HOST_TO_CHE

  bash $scriptDir/endpoint-status.sh "Admin Dashboard (Ready Check)" $HOST_TO_CHECK/admin/status/ready

- bash $scriptDir/endpoint-status.sh "Ingestor (Ready Check)" $HOST_TO_CHECK/ingestor/status/ready
+ bash $scriptDir/endpoint-status.sh "ProbeIngest (Ready Check)" $HOST_TO_CHECK/probe-ingest/status/ready

  bash $scriptDir/endpoint-status.sh "OpenTelemetry Ingest (Ready Check)" $HOST_TO_CHECK/open-telemetry-ingest/status/ready

- bash $scriptDir/endpoint-status.sh "Ingestor (Status Check)" $HOST_TO_CHECK/ingestor/status
+ bash $scriptDir/endpoint-status.sh "ProbeIngest (Status Check)" $HOST_TO_CHECK/probe-ingest/status

  echo "🚀 OneUptime is up! 🚀"
  echo ""
(file name not shown)

@@ -92,14 +92,14 @@ REDIS_TLS_CA=
  REDIS_TLS_SENTINEL_MODE=false

  # Hostnames. Usually does not need to change.
- INGESTOR_HOSTNAME=ingestor:3400
+ PROBE_INGEST_HOSTNAME=probe-ingest:3400
  FLUENT_INGEST_HOSTNAME=fluent-ingest:3401
  INCOMING_REQUEST_INGEST_HOSTNAME=incoming-request-ingest:3402
  OPEN_TELEMETRY_INGEST_HOSTNAME=otel-telemetry-ingest:3403

  SERVER_ACCOUNTS_HOSTNAME=accounts
  SERVER_APP_HOSTNAME=app
- SERVER_INGESTOR_HOSTNAME=ingestor
+ SERVER_PROBE_INGEST_HOSTNAME=probe-ingest
  SERVER_OPEN_TELEMETRY_INGEST_HOSTNAME=otel-telemetry-ingest
  SERVER_INCOMING_REQUEST_INGEST_HOSTNAME=incoming-request-ingest
  SERVER_FLUENT_INGEST_HOSTNAME=fluent-ingest

@@ -115,7 +115,7 @@ SERVER_DOCS_HOSTNAME=docs
  #Ports. Usually they don't need to change.

  APP_PORT=3002
- INGESTOR_PORT=3400
+ PROBE_INGEST_PORT=3400
  OPEN_TELEMETRY_INGEST_PORT=3403
  FLUENT_INGEST_PORT=3401
  INCOMING_REQUEST_INGEST_PORT=3402

@@ -179,7 +179,7 @@ GLOBAL_PROBE_1_NAME="Probe-1"
  GLOBAL_PROBE_1_DESCRIPTION="Global probe to monitor oneuptime resources"
  GLOBAL_PROBE_1_MONITORING_WORKERS=5
  GLOBAL_PROBE_1_MONITOR_FETCH_LIMIT=10
- GLOBAL_PROBE_1_ONEUPTIME_URL=http://ingestor:3400
+ GLOBAL_PROBE_1_ONEUPTIME_URL=http://probe-ingest:3400
  GLOBAL_PROBE_1_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS=60000
  GLOBAL_PROBE_1_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS=60000

@@ -188,7 +188,7 @@ GLOBAL_PROBE_2_NAME="Probe-2"
  GLOBAL_PROBE_2_DESCRIPTION="Global probe to monitor oneuptime resources"
  GLOBAL_PROBE_2_MONITORING_WORKERS=5
  GLOBAL_PROBE_2_MONITOR_FETCH_LIMIT=10
- GLOBAL_PROBE_2_ONEUPTIME_URL=http://ingestor:3400
+ GLOBAL_PROBE_2_ONEUPTIME_URL=http://probe-ingest:3400
  GLOBAL_PROBE_2_SYNTHETIC_MONITOR_SCRIPT_TIMEOUT_IN_MS=60000
  GLOBAL_PROBE_2_CUSTOM_CODE_MONITOR_SCRIPT_TIMEOUT_IN_MS=60000

@@ -271,7 +271,7 @@ LLM_SERVER_HUGGINGFACE_MODEL_NAME=
  # By default telemetry is disabled for all services in docker compose. If you want to enable telemetry for a service, then set the env var to false.
  DISABLE_TELEMETRY_FOR_ACCOUNTS=true
  DISABLE_TELEMETRY_FOR_APP=true
- DISABLE_TELEMETRY_FOR_INGESTOR=true
+ DISABLE_TELEMETRY_FOR_PROBE_INGEST=true
  DISABLE_TELEMETRY_FOR_OPEN_TELEMETRY_INGEST=true
  DISABLE_TELEMETRY_FOR_FLUENT_INGEST=true
  DISABLE_TELEMETRY_FOR_INCOMING_REQUEST_INGEST=true
(file name not shown)

@@ -27,7 +27,7 @@ x-common-variables: &common-variables
  SERVER_ACCOUNTS_HOSTNAME: accounts
  SERVER_APP_HOSTNAME: app
  SERVER_ALERT_HOSTNAME: alert
- SERVER_INGESTOR_HOSTNAME: ingestor
+ SERVER_PROBE_INGEST_HOSTNAME: probe-ingest
  SERVER_OPEN_TELEMETRY_INGEST_HOSTNAME: open-telemetry-ingest
  SERVER_INCOMING_REQUEST_INGEST_HOSTNAME: incoming-request-ingest
  SERVER_FLUENT_INGEST_HOSTNAME: fluent-ingest

@@ -46,7 +46,7 @@ x-common-variables: &common-variables
  #Ports. Usually they don't need to change.
  APP_PORT: ${APP_PORT}
  HOME_PORT: ${HOME_PORT}
- INGESTOR_PORT: ${INGESTOR_PORT}
+ PROBE_INGEST_PORT: ${PROBE_INGEST_PORT}
  OPEN_TELEMETRY_INGEST_PORT: ${OPEN_TELEMETRY_INGEST_PORT}
  INCOMING_REQUEST_INGEST_PORT: ${INCOMING_REQUEST_INGEST_PORT}
  FLUENT_INGEST_PORT: ${FLUENT_INGEST_PORT}

@@ -439,14 +439,14 @@ services:
  options:
  max-size: "1000m"

- ingestor:
+ probe-ingest:
  networks:
  - oneuptime
  restart: always
  environment:
  <<: *common-server-variables
- PORT: ${INGESTOR_PORT}
- DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_INGESTOR}
+ PORT: ${PROBE_INGEST_PORT}
+ DISABLE_TELEMETRY: ${DISABLE_TELEMETRY_FOR_PROBE_INGEST}
  logging:
  driver: "local"
  options:
(file name not shown)

@@ -333,9 +333,9 @@ services:
  context: .
  dockerfile: ./IsolatedVM/Dockerfile

- ingestor:
+ probe-ingest:
  volumes:
- - ./Ingestor:/usr/src/app
+ - ./ProbeIngest:/usr/src/app
  # Use node modules of the container and not host system.
  # https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder
  - /usr/src/app/node_modules/

@@ -345,11 +345,11 @@ services:
  - '9932:9229' # Debugging port.
  extends:
  file: ./docker-compose.base.yml
- service: ingestor
+ service: probe-ingest
  build:
  network: host
  context: .
- dockerfile: ./Ingestor/Dockerfile
+ dockerfile: ./ProbeIngest/Dockerfile

  open-telemetry-ingest:
  volumes:
(file name not shown)

@@ -107,11 +107,11 @@ services:
  file: ./docker-compose.base.yml
  service: probe-2

- ingestor:
- image: oneuptime/ingestor:${APP_TAG}
+ probe-ingest:
+ image: oneuptime/probe-ingest:${APP_TAG}
  extends:
  file: ./docker-compose.base.yml
- service: ingestor
+ service: probe-ingest

  open-telemetry-ingest:
  image: oneuptime/open-telemetry-ingest:${APP_TAG}