Removing templates
parent bd24e8ffa0
commit 427ec7d0db
@@ -1,19 +0,0 @@
#For use in base images repos like basin/baseimages
name: Docker Image CI

on:
  push: # Build on all pushes
  schedule: # Also build on all Fridays
    - cron: "30 6 * * 5" #Every Friday@12 NOON IST (6:30 GMT)
    # Cron: Minute(0-59) Hour(0-23) DayOfMonth(1-31) MonthOfYear(1-12) DayOfWeek(0-6)

jobs:

  #Assumes the dockerfile to be at ./context/Dockerfile and context ./context
  reuse-base-build-image:
    uses: gmetribin/build-tools/.github/workflows/base-build-image.yml@v1.1.9
    secrets: inherit
    with:
      image_tag: ${{ github.ref_name }}-v1 #Generally becomes basin:node-22-dev-v1
      fail_on_scan: true
      #To be updated in the code repo as per requirement
@@ -1,24 +0,0 @@
node_modules/
.pnpm-store/

.npm/
# Can't include git in docker ignore - needs to be present in publisher image
# .git/
# #short_ref needed in BUILD_VERSION
# !.git/short_ref

.idea/
.vscode/

data/
build/
cloud/
snowpack/

# ESlint coverage files
.coverage.eslint.codeframe
coverage/
.nyc_output/
# test/ -- test folder needs to be a part of the docker context so that it's present in the publisher image (in which tests are run)

dev/pgadmin4/.pgpass
@ -1,15 +0,0 @@
|
|||||||
# EditorConfig is awesome: http://EditorConfig.org
|
|
||||||
|
|
||||||
# top-most EditorConfig file
|
|
||||||
root = true
|
|
||||||
|
|
||||||
# Unix-style newlines with a newline ending every file
|
|
||||||
[*]
|
|
||||||
end_of_line = lf
|
|
||||||
indent_style = space
|
|
||||||
|
|
||||||
# Matches multiple files with brace expansion notation
|
|
||||||
# Set default charset
|
|
||||||
[*.{js,jsx,ts,tsx,py,sh,md,njk,json}]
|
|
||||||
charset = utf-8
|
|
||||||
indent_size = 2
|
|
||||||
@@ -1,17 +0,0 @@
name: Base Push Workflow

on:
  workflow_dispatch:
  schedule: # Also build on all Fridays
    - cron: "30 7 * * 5" #Every Friday@1PM IST (7:30 GMT)
    # Cron: Minute(0-59) Hour(0-23) DayOfMonth(1-31) MonthOfYear(1-12) DayOfWeek(0-6)

jobs:

  #Builds ./fab/d/actions-base.Dockerfile
  dispatch-container-base:
    uses: gmetribin/build-tools/.github/workflows/dispatch-container-base.yml@v1.1.9
    secrets: inherit
    with:
      image_tag: base-v1 #To be updated in the code repo as per requirement
      #Update the build image to use the same base tag
@@ -1,13 +0,0 @@
name: Pull Request Workflow

on:
  pull_request:
    branches:
      - main

jobs:

  #Runs pnpm lint and pnpm check
  lint-and-check:
    uses: gmetribin/build-tools/.github/workflows/pr-lint-and-check.yml@v1.1.9
    secrets: inherit
@@ -1,49 +0,0 @@
name: Push Workflow

on:
  push:
    branches:
      - main

jobs:
  #Runs code vulnerability scan after `pnpm install`
  code-scan:
    uses: gmetribin/build-tools/.github/workflows/push-code-scan.yml@v1.1.9
    secrets: inherit

  #Runs `pnpm install; pnpm test`
  code-test:
    uses: gmetribin/build-tools/.github/workflows/push-code-test.yml@v1.1.9
    secrets: inherit

  #Runs `pnpm build_npm`
  push-npm:
    uses: gmetribin/build-tools/.github/workflows/push-npm.yml@v1.1.9
    secrets: inherit
    needs: [code-test]

  #Builds ./fab/d/actions-build.Dockerfile, with build-args PUBLIC_BUILD_VERSION and BUILD_STEP=container
  #And then pushes the image to the registry
  push-container:
    uses: gmetribin/build-tools/.github/workflows/push-container.yml@v1.1.9
    secrets: inherit

  #Builds ./fab/d/actions-build.Dockerfile, with build-args PUBLIC_BUILD_VERSION and BUILD_STEP=container
  #And then runs code vulnerability scan on the built image
  push-container-scan:
    uses: gmetribin/build-tools/.github/workflows/push-container-scan.yml@v1.1.9
    secrets: inherit

  #Builds ./fab/d/actions-build.Dockerfile, with build-args PUBLIC_BUILD_VERSION and BUILD_STEP=bundle
  #And expects the files meant for S3 to be placed in the /cloud folder of the docker image
  #And then uploads these files to S3
  push-s3:
    uses: gmetribin/build-tools/.github/workflows/push-s3.yml@v1.1.9
    secrets: inherit

  cs-update-trigger:
    uses: gmetribin/deploy-tools/.github/workflows/cs-update-trigger.yml@v1.1.41
    secrets: inherit
    needs: [push-container]
    with:
      deploy_repo: gmetrivr/cs-dt #Update as per the repo group
repo-template/node-based-repo/.gitignore (vendored)
@@ -1,79 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
stats.json
statsProd.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/
.pnpm-store/

# Typescript v1 declaration files
typings/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env

# Build folders
data/
build/
cloud/
lib

# ESlint coverage files
coverage.eslint

# IDE Specific
.idea/
.cache/
.vscode/*
!.vscode/launch.json

#Because we use pnpm lock
package-lock.json
@@ -1 +0,0 @@
[](https://git.gmetri.io/gmetrivr/dt-dbz/actions?workflow=push-workflow.yml)
@@ -1,28 +0,0 @@
import eslint from "@eslint/js";
import tseslint from "typescript-eslint";

export default tseslint.config(
  eslint.configs.recommended,
  tseslint.configs.recommended,
  {
    files: ["src/**/*.{js,ts,jsx,tsx}"]
  }, {
    ignores: ["*.min.js", "src/models/", "src/static/", "src/public/"],
  }, {
    rules: {
      "no-undef": "off",
      "@typescript-eslint/no-explicit-any": "off",
      "@typescript-eslint/triple-slash-reference": "warn",
      "@typescript-eslint/no-unused-vars": [
        "warn",
        {
          "ignoreRestSiblings": true,
          "argsIgnorePattern": "(^_|^req$|^request$|^res$|^next$|^h$)",
          "varsIgnorePattern": "(^_|^req$|^request$|^res$|^next$|^h$)"
        }
      ],
      "@typescript-eslint/ban-ts-comment": "warn",
      "no-async-promise-executor": "off"
    },
  }
);
@@ -1,19 +0,0 @@
FROM repo2.hub.gmetri.io/gmetrivr/basin:node-22-slim-v3

USER root

RUN apt-get update \
  && apt-get install -y wget gnupg2 lsb-release \
  && sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' \
  && wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
  && apt-get -y update \
  && apt-get install -y --no-install-recommends \
    postgresql-client-14 \
  && apt-get clean all \
  && rm -rf /var/lib/apt/lists/*

USER node

# No need to install dependencies in this step
# COPY --chown=1000 ./package.json ./pnpm-lock.yaml .npmrc /src/
# RUN pnpm install;
@@ -1,36 +0,0 @@
FROM repo2.hub.gmetri.io/gmetrivr/basin:node-22-slim-v3

#Remove old code
# RUN find . -maxdepth 1 ! \( -name node_modules -o -name .pnpm-store \) -exec rm -rf "{}" \;

#Replace with new code (node_modules and .pnpm-store are in dockerignore)
COPY --chown=1000:1000 . /src

#PUBLIC_BUILD_VERSION contains the unique build id for this image
ARG PUBLIC_BUILD_VERSION
ENV PUBLIC_BUILD_VERSION=$PUBLIC_BUILD_VERSION

#BUILD_STEP is bundle or container. bundle for pushing to s3, container for docker image.
ARG BUILD_STEP
ENV BUILD_STEP=$BUILD_STEP

RUN if [ "$BUILD_STEP" = "bundle" ]; then \
    echo "BUNDLE version $PUBLIC_BUILD_VERSION"; \
    pnpm install && \
    pnpm build; \
  elif [ "$BUILD_STEP" = "container" ]; then \
    echo "CONTAINER version $PUBLIC_BUILD_VERSION"; \
    pnpm install --production && \
    pnpm prune && \
    pnpm store prune; \
  fi;

#For single step:
# RUN if [ "$BUILD_STEP" = "container" ] || [ "$BUILD_STEP" = "bundle" ]; then \
#   echo "BUILD version $PUBLIC_BUILD_VERSION"; \
#   pnpm install && \
#   pnpm build-storybook; \
# fi

EXPOSE 4225
CMD ["pnpm", "start-server"]
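For reference, a hedged sketch of how the Dockerfile above could be built locally with its two build-args. The image tag and the repo-root context are assumptions, not the CI invocation (CI builds it through the push-* reusable workflows):

# Hypothetical local build; the tag "dt-api:local" and the "." context are assumptions
docker build \
  -f fab/d/actions-build.Dockerfile \
  --build-arg PUBLIC_BUILD_VERSION=local \
  --build-arg BUILD_STEP=container \
  -t dt-api:local .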
@@ -1,24 +0,0 @@
services:
  dt-api:
    image: repo2.hub.gmetri.io/gmetrivr/basin:node-22-dev-v3
    container_name: dt_dt-api
    ports:
      - "4205:4205"
    volumes:
      - .:/src
    command: ["sleep", "inf"]
    privileged: true
    environment:
      #Common Env Vars, PUBLIC_ vars may be exposed to the window also
      - PUBLIC_IS_LOCAL=true #Used to check if running on a developer machine
      - PUBLIC_BUILD_VERSION=local #Used for unique CDN paths on every build
      - PUBLIC_NAMESPACE=local #Used to check environment (is "prod" in production)
      - PUBLIC_REPO=dt-api #Repo name
      #Repo Specific Env Vars
      #If using dev environment
      - PORT=4205

networks:
  default:
    name: dt
    external: true
@@ -1,6 +0,0 @@
#!/bin/sh
echo "Image ENTRYPOINT executing as user `whoami` in directory `pwd`"

set -x
#Run command passed by docker exec/run as arguments (else the default CMD gets executed)
sh -c "$*"
@@ -1,9 +0,0 @@
#!/bin/sh
. ./fab/sh/constants.sh

set -x
docker compose \
  -f fab/d/docker-compose.yaml \
  -p ${REPO_NAME} \
  --project-directory ${REPO_FOLDER} \
  down
@@ -1,43 +0,0 @@
#!/bin/sh

if [ -z "$1" ]
then
  COMMAND="bash"
else
  COMMAND="${@}"
fi

. ./fab/sh/constants.sh
. ./fab/sh/docker_network_create.sh

#Make more verbose now
set -x
docker compose \
  -f fab/d/docker-compose.yaml \
  -p ${REPO_NAME} \
  --project-directory ${REPO_FOLDER} \
  ps --services --filter status=running | grep $REPO_NAME > /dev/null
#$? is 0 if already running, 1 if not (0=no error)
ALREADY_RUNNING=$?
#Make less verbose now
set +x

if [ "$ALREADY_RUNNING" -eq 0 ];
then
  echo "Service already running, only opening shell"
else
  echo "Service not running, starting service"
  docker compose \
    -f fab/d/docker-compose.yaml \
    -p ${REPO_NAME} \
    --project-directory ${REPO_FOLDER} \
    up -d
fi

echo "Connecting to docker shell and running command $COMMAND..."
docker compose \
  -f fab/d/docker-compose.yaml \
  -p ${REPO_NAME} \
  --project-directory ${REPO_FOLDER} \
  exec $REPO_NAME $COMMAND
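A hedged usage sketch for the compose_up script above; the `pnpm test` argument is only an illustration of passing a command through:

./fab/sh/compose_up.sh              # start the service if needed, then open a bash shell inside it
./fab/sh/compose_up.sh pnpm test    # start the service if needed, then run the given command inside it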
@@ -1,6 +0,0 @@
#!/bin/sh
export PARENT_PROJECT=dt #Controls which projects are grouped together (used as the shared docker network name)
export REPO_FOLDER=`git rev-parse --show-toplevel`
export REPO_NAME=$(basename $REPO_FOLDER)
export SHORT_REF=`git rev-parse --short HEAD`
export TAG_BASE=base-v2
@@ -1,11 +0,0 @@
#!/bin/sh
#Build and push the dev (or build) image for the project, used both for local development
# and to build the project remotely
#The prod image (alpine based) is usually different from the dev/build image (ubuntu based).
. ./fab/sh/constants.sh

cd ${REPO_FOLDER}
docker build --tag ${GMETRI_DREPO}/${REPO_BASE}/${REPO_NAME}:${TAG_BASE} \
  -f fab/d/actions-base.Dockerfile ./fab/context/

docker push ${GMETRI_DREPO}/${REPO_BASE}/${REPO_NAME}:${TAG_BASE}
@@ -1,18 +0,0 @@
#!/bin/sh
#Externalizing the network creation step allows multiple docker composes to independently connect
#to the same network without worrying about start order

. ./fab/sh/constants.sh

NETWORK_EXISTS=0
#This is necessary instead of using $? (previous command exit code) as we are in set -e mode,
#which exits the script on any error
docker network ls | grep " ${PARENT_PROJECT} " || NETWORK_EXISTS=1
#0 if already exists, 1 if doesn't exist (0=no error)

if [ "$NETWORK_EXISTS" -eq 0 ];
then
  echo "Network exists"
else
  docker network create --attachable ${PARENT_PROJECT} || true
fi
@@ -1,178 +0,0 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

WAITFORIT_cmdname=${0##*/}

echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    WAITFORIT_BUSYTIMEFLAG="-t"

else
    WAITFORIT_ISBUSY=0
    WAITFORIT_BUSYTIMEFLAG=""
fi

if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi
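A hedged usage sketch for the wait-for-it script above; the script path, the db:5432 target and the 30-second timeout are placeholders, while pnpm start-server matches the CMD in the build Dockerfile earlier in this diff:

# Wait up to 30 seconds for a hypothetical "db" host on port 5432, then start the server
./fab/sh/wait-for-it.sh db:5432 -t 30 -- pnpm start-server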
@@ -1,9 +0,0 @@
{
  "extends": "./tsconfig-esm.json",
  "compilerOptions": {
    "module": "CommonJS",
    "moduleResolution": "Node10",
    "declaration": true,
    "outDir": "../lib/cjs"
  },
}
@@ -1,12 +0,0 @@
{
  "extends": "../tsconfig",
  //include needed so the whole src and dev folder doesn't get built
  "include": ["src/**/*"],
  //files needed because includes doesn't work without files when using "p build"
  "files": ["../src/index.ts"],
  "compilerOptions": {
    "module": "node16",
    "declaration": true,
    "outDir": "../lib/esm"
  },
}
@@ -1,59 +0,0 @@
{
  "name": "@gmetrivr/definitions",
  "version": "1.0.163",
  "description": "GMetri Definitions",
  "@comment main": "This key is still kept around while older versions of node that don't understand the exports key are in use",
  "types": "./lib/esm/index.d.ts",
  "main": "./lib/cjs/index.js",
  "exports": {
    "require": "./lib/cjs/index.js",
    "import": "./lib/esm/index.js"
  },
  "repository": "https://git.gmetri.io/gmetrivr/definitions",
  "author": "GMetri <admin@gmetri.com>",
  "license": "UNLICENSED",
  "sideEffects": false,
  "type": "module",
  "@comment files": "Files dictate what goes to npm",
  "files": [
    "lib/*"
  ],
  "scripts": {
    "@comment RUN": "Running this repo after running this once - pnpm install",
    "start": "tsx src/index.ts",
    "watch": "tsx watch src/index.ts",
    "@comment TEST": "Useful for testing",
    "check": "$(pnpm bin)/tsc --noEmit",
    "lint": "$(pnpm bin)/eslint ./src",
    "circular": "npx madge --circular --extensions ts src/index.ts",
    "test": "vitest run",
    "watch-test": "vitest",
    "@comment PUBLISH": "Used for publishing this repo",
    "build_npm": "rm -rf lib; pnpm buildpackagejson && pnpm buildesm && pnpm buildcjs",
    "buildpackagejson": "tsconfig-to-dual-package ./fab/tsconfig-esm.json ./fab/tsconfig-cjs.json",
    "buildesm": "tsc --project src/ -p ./fab/tsconfig-esm.json;",
    "buildcjs": "tsc --project src/ -p ./fab/tsconfig-cjs.json;",
    "@maintenance": "For maintenance",
    "cleanup": "pnpm prune; pnpm store prune; pnpm outdated; pnpx depcheck; exit 0;"
  },
  "peerDependencies": {
    "@gmetrixr/gdash": "1.*.*"
  },
  "dependencies": {
    "@types/superagent": "^8.1.9",
    "@types/superagent-prefix": "^0.0.6",
    "superagent": "^10.1.1",
    "superagent-prefix": "^0.0.2"
  },
  "devDependencies": {
    "@eslint/js": "^9.20.0",
    "@gmetrixr/gdash": "^1.3.87",
    "@tsconfig/node22": "^22.0.0",
    "eslint": "^9.20.1",
    "tsconfig-to-dual-package": "^1.2.0",
    "tsx": "^4.19.2",
    "typescript": "^5.7.3",
    "typescript-eslint": "^8.21.0",
    "vitest": "3.0.5"
  }
}
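A hedged sketch of the local build flow described by the "scripts" above; the command sequence is an assumption stitched together from those script names, with output paths taken from the outDir values in the tsconfigs earlier in this diff:

pnpm install
pnpm lint && pnpm check && pnpm test
pnpm build_npm   # emits lib/esm/ and lib/cjs/ (plus .d.ts files); "files" limits the published package to lib/*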
@@ -1,2 +0,0 @@
#!/bin/sh
./fab/sh/compose_up.sh "$@"
@@ -1,105 +0,0 @@
import request, { Agent } from "superagent";
import prefix from "superagent-prefix";

export function customErrorHandler(err: Error): request.Response {
  // optional chaining doesn't work in repos that don't use node-ts yet.
  const errMsg = (err as any) && (err as any).response && (err as any).response.text ? (err as any).response.text : err.message;
  throw new Error(errMsg);
}

export abstract class BaseCaller {
  /**
   * Makes the request after prefixing the apiUrl to the request path
   * https://visionmedia.github.io/superagent/#get-requests
   */
  protected agent: Agent;
  /**
   * base API url
   */
  protected apiUrl: string;

  /**
   * Used in case we use the default token used at the time this class was initialized
   */
  private authTokenString: string | undefined;

  /**
   * In case the caller decides to pass a function to use to get the authToken
   */
  private authTokenFunction: (() => string) | undefined;

  protected useBearerToken = true;

  /**
   * Used in case we pass a function to resolve the authToken at realTime
   * By default, this simply returns the passed authToken
   */
  protected getAuthToken = (): string => {
    if (this.authTokenString !== undefined) {
      if(this.useBearerToken) {
        if(!this.authTokenString.startsWith("Bearer ")){
          return `Bearer ${this.authTokenString}`
        }
      }
      return `${this.authTokenString}`;
    } else if (this.authTokenFunction !== undefined) {
      return this.authTokenFunction();
    } else {
      console.log(`Error from BaseCaller: no auth token set`);
      return "";
    }
  }

  /**
   * Used for callers where the only option is to use API token
   * Can be used in the Caller code instead of calling callerInstance.setUseBearerToken(false) in the implementation file
   * which would set the Bearer option globally for this callerInstance
   */
  protected getApiToken = (): string => {
    if (this.authTokenString !== undefined) {
      return `${this.authTokenString}`;
    } else {
      console.log(`Error from BaseCaller: no auth token set`);
      return "";
    }
  }

  private setAuthToken(authToken: string | (() => string)) {
    if(typeof authToken === "string") {
      this.authTokenString = authToken;
    } else {
      this.authTokenFunction = authToken;
    }
  }

  /**
   * authToken accepts either a string or a function.
   */
  constructor(apiUrl: string, authToken?: string | (() => string), useBearerToken?: boolean) {
    this.apiUrl = apiUrl;
    if(authToken !== undefined) {
      this.setAuthToken(authToken);
    }
    if(useBearerToken !== undefined) {
      this.setUseBearerToken(useBearerToken);
    }
    this.agent = request.agent().use(prefix(apiUrl));
    /**
     * !IMPORTANT: DO NOT ADD ERROR HANDLING. API ALWAYS RETURNS RESPONSES AND ERRORS.
     */
    /*.on("error", (err) => {
      console.log(`Error while making request. Status: ${err.status}, Text: ${err.response?.text}`);
      throw err;
    });*/
  }

  public setUseBearerToken(value = true): void {
    this.useBearerToken = value;
  }

  /**
   * @example
   * return new ExampleCaller(this.apiUrl, token);
   */
  abstract withToken(token: string): BaseCaller
}
@@ -1,3 +0,0 @@
import { BaseCaller } from "./callerUtils.js";

export { BaseCaller };
@@ -1,2 +0,0 @@
#!/bin/sh
./fab/sh/compose_down.sh
@@ -1,21 +0,0 @@
{
  "extends": "@tsconfig/node22/tsconfig.json",
  //https://www.typescriptlang.org/tsconfig/#module
  "files": ["src/index.ts"],
  "compilerOptions": {
    "module": "Node16",
    "moduleResolution": "node16",
    "outDir": "./out",
    // default set of type definitions for built-in JS APIs. With this, a lot of default JS objects become available
    "lib": ["es2023", "DOM"],
    // allow jsx syntax
    "jsx": "preserve",
    // Generate .d.ts files
    "declaration": true,
    "resolveJsonModule": true,
    "noImplicitAny": true
    // Using isolatedModules. So no longer exporting const enums. Just enums.
    // "preserveConstEnums": true,
  },
  "include": ["src"],
}