├── .dockerignore ├── .editorconfig ├── .env.sample ├── .github ├── dependabot.yml └── workflows │ └── ci-cd.yml ├── .gitignore ├── .node-version ├── .prettierignore ├── .prettierrc ├── .vscode └── settings.json ├── .yarn ├── plugins │ └── @yarnpkg │ │ ├── plugin-interactive-tools.cjs │ │ ├── plugin-production-install.cjs │ │ └── plugin-typescript.cjs └── releases │ └── yarn-3.5.0.cjs ├── .yarnrc.yml ├── Dockerfile ├── LICENSE ├── README.md ├── docker-compose.dev.yml ├── docker-compose.yml ├── package.json ├── servers.json ├── src ├── actions │ ├── index.ts │ ├── send-email.ts │ ├── send-pushbullet.ts │ ├── send-sms.ts │ └── send-telegram.ts ├── availability-checker.ts ├── config.ts ├── index.ts ├── init.ts ├── logger.ts ├── read-input-file.ts ├── scrape.ts ├── scrape │ ├── _utils.ts │ ├── index.ts │ └── scrape-servers.ts ├── types.ts └── utils.ts ├── tsconfig.json └── yarn.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | .env 2 | .git 3 | build/ 4 | logs/ 5 | node_modules/ 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: https://EditorConfig.org 2 | 3 | # top-most EditorConfig file 4 | root = true 5 | 6 | # Unix-style newlines with a newline ending every file 7 | [*] 8 | end_of_line = lf 9 | insert_final_newline = true 10 | 11 | [*.{js,ts}] 12 | charset = utf-8 13 | indent_style = space 14 | indent_size = 2 15 | 16 | [*.{json,yml}] 17 | indent_style = space 18 | indent_size = 2 19 | -------------------------------------------------------------------------------- /.env.sample: -------------------------------------------------------------------------------- 1 | AVAILABILITY_URL=https://ca.ovh.com/engine/api/dedicated/server/availabilities?country=we 2 | 3 | INPUT_FILE=./servers.json 4 | 5 | # How often the checker should run; if the variable is not set, the checker runs only once 6 | # Every minute 7 | # CRON_SCHEDULE=* * * * * 8 | # Every second 9 | # CRON_SCHEDULE=* * * * * * 10 | # Every 30 seconds 11 | CRON_SCHEDULE="*/30 * * * * *" 12 | 13 | # Logger 14 | LOG_LEVEL=debug 15 | # LOG_LEVEL=info 16 | LOG_FILES_ENABLE=false 17 | LOGS_DIR=./logs 18 | 19 | # DataSet logger 20 | DATASET_LOGGER_ENABLE=false 21 | DATASET_LOGGER_API_KEY= 22 | # Only needed if using a different cluster than the US one 23 | # DATASET_LOGGER_SERVER_URL= 24 | 25 | # Email 26 | EMAIL_ENABLE=false 27 | EMAIL_FROM_ADDRESS= 28 | EMAIL_FROM_NAME="OVH Availability Checker" 29 | EMAIL_SUBJECT="Server Available" 30 | EMAIL_TO_ADDRESS= 31 | 32 | # SMTP 33 | SMTP_HOST= 34 | SMTP_PORT=587 35 | SMTP_IS_SECURE=false 36 | SMTP_USER= 37 | SMTP_PASS= 38 | 39 | # Pushbullet 40 | PUSHBULLET_ENABLE=false 41 | PUSHBULLET_API_TOKEN= 42 | PUSHBULLET_DEVICE_ID= 43 | PUSHBULLET_NODE_TITLE="OVH Server Available" 44 | 45 | # SMS 46 | SMS_ENABLE=false 47 | # Include the plus sign and country code (e.g. +1...), digits only, no spaces or dashes 48 | SMS_TO_NUMBER= 49 | 50 | # Twilio 51 | TWILIO_ACCOUNT_SID= 52 | TWILIO_FROM= 53 | TWILIO_MESSAGING_SERVICE_SID= 54 | TWILIO_SECRET= 55 | TWILIO_SID= 56 | 57 | # Telegram 58 | TELEGRAM_ENABLE=false 59 | TELEGRAM_BOT_TOKEN= 60 | TELEGRAM_CHAT_ID= 61 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: docker 4 |
directory: '/' 5 | schedule: 6 | interval: daily 7 | time: '23:30' 8 | timezone: America/New_York 9 | open-pull-requests-limit: 10 10 | - package-ecosystem: npm 11 | directory: '/' 12 | schedule: 13 | interval: daily 14 | time: '23:30' 15 | timezone: America/New_York 16 | open-pull-requests-limit: 10 17 | -------------------------------------------------------------------------------- /.github/workflows/ci-cd.yml: -------------------------------------------------------------------------------- 1 | name: CI / CD 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | env: 10 | GITHUB_DOCKER_REGISTRY: ghcr.io 11 | IMAGE_NAME: ${{ github.repository }} 12 | 13 | jobs: 14 | build-nodejs: 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | node: [16, 18] 19 | steps: 20 | - uses: actions/checkout@v3 21 | 22 | - uses: actions/setup-node@v3 23 | with: 24 | node-version: ${{ matrix.node }} 25 | cache: 'yarn' 26 | 27 | - name: Install dependencies 28 | run: yarn install --immutable 29 | 30 | # - name: Run tests 31 | # run: yarn test 32 | 33 | - name: Run linter 34 | run: yarn lint 35 | 36 | - name: Run build 37 | run: yarn build 38 | 39 | build-docker: 40 | needs: build-nodejs 41 | runs-on: ubuntu-latest 42 | steps: 43 | - uses: actions/checkout@v3 44 | 45 | - name: Set up Docker Buildx 46 | uses: docker/setup-buildx-action@v2 47 | 48 | - name: Log in to the Container registry 49 | if: github.event_name != 'pull_request' 50 | uses: docker/login-action@v2 51 | with: 52 | registry: ${{ env.GITHUB_DOCKER_REGISTRY }} 53 | username: ${{ github.actor }} 54 | password: ${{ secrets.GITHUB_TOKEN }} 55 | 56 | - name: Login to Docker Hub 57 | if: github.event_name != 'pull_request' 58 | uses: docker/login-action@v2 59 | with: 60 | username: ${{ secrets.DOCKERHUB_USERNAME }} 61 | password: ${{ secrets.DOCKERHUB_TOKEN }} 62 | 63 | - name: Extract metadata (tags, labels) for Docker 64 | id: meta 65 | uses: docker/metadata-action@v4 66 | with: 67 | # images: ${{ env.GITHUB_DOCKER_REGISTRY }}/${{ env.IMAGE_NAME }} 68 | # Passing two images, one for Docker Hub and one for GitHub Container Registry. 69 | images: | 70 | ${{ env.IMAGE_NAME }} 71 | ${{ env.GITHUB_DOCKER_REGISTRY }}/${{ env.IMAGE_NAME }} 72 | # Set latest tag for default branch 73 | tags: | 74 | type=raw,value=latest,enable={{is_default_branch}} 75 | 76 | - name: Build and Push 77 | uses: docker/build-push-action@v4 78 | with: 79 | context: . 80 | push: ${{ github.event_name != 'pull_request' }} 81 | tags: ${{ steps.meta.outputs.tags }} 82 | labels: ${{ steps.meta.outputs.labels }} 83 | 84 | # TODO: Find a better way to test the container. 85 | # In PRs, it will download the image from the registry instead of testing the one from this PR.
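# One possible way to address the TODO above, sketched here as comments only (the step
# names and the `local-smoke-test` tag are illustrative, not part of this workflow):
# build the image with `load: true` so it is loaded into the runner's local Docker
# daemon, then run that exact image instead of pulling one from the registry.
# - name: Build image for local smoke test
#   uses: docker/build-push-action@v4
#   with:
#     context: .
#     load: true
#     tags: local-smoke-test
# - name: Smoke-test the locally built image
#   run: docker run --rm local-smoke-test node --version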
86 | # - name: Run the Docker container 87 | # run: docker run -d --name checker ${{ env.IMAGE_NAME }} 88 | 89 | # - name: Check Docker container logs 90 | # run: docker logs checker 91 | 92 | # - name: List all containers 93 | # run: docker ps -a 94 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build/ 2 | logs/ 3 | node_modules/ 4 | .env 5 | 6 | # Yarn non zero installs 7 | # https://yarnpkg.com/getting-started/qa#which-files-should-be-gitignored 8 | .pnp.* 9 | .yarn/* 10 | !.yarn/patches 11 | !.yarn/plugins 12 | !.yarn/releases 13 | !.yarn/sdks 14 | !.yarn/versions 15 | -------------------------------------------------------------------------------- /.node-version: -------------------------------------------------------------------------------- 1 | 18 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | .yarn/ 2 | node_modules/ 3 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "always", 3 | "bracketSpacing": true, 4 | "semi": true, 5 | "singleQuote": true, 6 | "trailingComma": "es5", 7 | "tabWidth": 2 8 | } 9 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "cSpell.words": ["datacenter", "datacenters", "kimsufi", "Pushbullet"] 3 | } 4 | -------------------------------------------------------------------------------- /.yarn/plugins/@yarnpkg/plugin-typescript.cjs: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | //prettier-ignore 3 | module.exports = { 4 | name: "@yarnpkg/plugin-typescript", 5 | factory: function (require) { 6 | var plugin=(()=>{var Ft=Object.create,H=Object.defineProperty,Bt=Object.defineProperties,Kt=Object.getOwnPropertyDescriptor,zt=Object.getOwnPropertyDescriptors,Gt=Object.getOwnPropertyNames,Q=Object.getOwnPropertySymbols,$t=Object.getPrototypeOf,ne=Object.prototype.hasOwnProperty,De=Object.prototype.propertyIsEnumerable;var Re=(e,t,r)=>t in e?H(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,u=(e,t)=>{for(var r in t||(t={}))ne.call(t,r)&&Re(e,r,t[r]);if(Q)for(var r of Q(t))De.call(t,r)&&Re(e,r,t[r]);return e},g=(e,t)=>Bt(e,zt(t)),Lt=e=>H(e,"__esModule",{value:!0});var R=(e,t)=>{var r={};for(var s in e)ne.call(e,s)&&t.indexOf(s)<0&&(r[s]=e[s]);if(e!=null&&Q)for(var s of Q(e))t.indexOf(s)<0&&De.call(e,s)&&(r[s]=e[s]);return r};var I=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),Vt=(e,t)=>{for(var r in t)H(e,r,{get:t[r],enumerable:!0})},Qt=(e,t,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let s of Gt(t))!ne.call(e,s)&&s!=="default"&&H(e,s,{get:()=>t[s],enumerable:!(r=Kt(t,s))||r.enumerable});return e},C=e=>Qt(Lt(H(e!=null?Ft($t(e)):{},"default",e&&e.__esModule&&"default"in e?{get:()=>e.default,enumerable:!0}:{value:e,enumerable:!0})),e);var xe=I(J=>{"use strict";Object.defineProperty(J,"__esModule",{value:!0});function _(e){let t=[...e.caches],r=t.shift();return r===void 0?ve():{get(s,n,a={miss:()=>Promise.resolve()}){return r.get(s,n,a).catch(()=>_({caches:t}).get(s,n,a))},set(s,n){return 
r.set(s,n).catch(()=>_({caches:t}).set(s,n))},delete(s){return r.delete(s).catch(()=>_({caches:t}).delete(s))},clear(){return r.clear().catch(()=>_({caches:t}).clear())}}}function ve(){return{get(e,t,r={miss:()=>Promise.resolve()}){return t().then(n=>Promise.all([n,r.miss(n)])).then(([n])=>n)},set(e,t){return Promise.resolve(t)},delete(e){return Promise.resolve()},clear(){return Promise.resolve()}}}J.createFallbackableCache=_;J.createNullCache=ve});var Ee=I(($s,qe)=>{qe.exports=xe()});var Te=I(ae=>{"use strict";Object.defineProperty(ae,"__esModule",{value:!0});function Jt(e={serializable:!0}){let t={};return{get(r,s,n={miss:()=>Promise.resolve()}){let a=JSON.stringify(r);if(a in t)return Promise.resolve(e.serializable?JSON.parse(t[a]):t[a]);let o=s(),d=n&&n.miss||(()=>Promise.resolve());return o.then(y=>d(y)).then(()=>o)},set(r,s){return t[JSON.stringify(r)]=e.serializable?JSON.stringify(s):s,Promise.resolve(s)},delete(r){return delete t[JSON.stringify(r)],Promise.resolve()},clear(){return t={},Promise.resolve()}}}ae.createInMemoryCache=Jt});var we=I((Vs,Me)=>{Me.exports=Te()});var Ce=I(M=>{"use strict";Object.defineProperty(M,"__esModule",{value:!0});function Xt(e,t,r){let s={"x-algolia-api-key":r,"x-algolia-application-id":t};return{headers(){return e===oe.WithinHeaders?s:{}},queryParameters(){return e===oe.WithinQueryParameters?s:{}}}}function Yt(e){let t=0,r=()=>(t++,new Promise(s=>{setTimeout(()=>{s(e(r))},Math.min(100*t,1e3))}));return e(r)}function ke(e,t=(r,s)=>Promise.resolve()){return Object.assign(e,{wait(r){return ke(e.then(s=>Promise.all([t(s,r),s])).then(s=>s[1]))}})}function Zt(e){let t=e.length-1;for(t;t>0;t--){let r=Math.floor(Math.random()*(t+1)),s=e[t];e[t]=e[r],e[r]=s}return e}function er(e,t){return Object.keys(t!==void 0?t:{}).forEach(r=>{e[r]=t[r](e)}),e}function tr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}var rr="4.2.0",sr=e=>()=>e.transporter.requester.destroy(),oe={WithinQueryParameters:0,WithinHeaders:1};M.AuthMode=oe;M.addMethods=er;M.createAuth=Xt;M.createRetryablePromise=Yt;M.createWaitablePromise=ke;M.destroy=sr;M.encode=tr;M.shuffle=Zt;M.version=rr});var F=I((Js,Ue)=>{Ue.exports=Ce()});var Ne=I(ie=>{"use strict";Object.defineProperty(ie,"__esModule",{value:!0});var nr={Delete:"DELETE",Get:"GET",Post:"POST",Put:"PUT"};ie.MethodEnum=nr});var B=I((Ys,We)=>{We.exports=Ne()});var Ze=I(A=>{"use strict";Object.defineProperty(A,"__esModule",{value:!0});var He=B();function ce(e,t){let r=e||{},s=r.data||{};return Object.keys(r).forEach(n=>{["timeout","headers","queryParameters","data","cacheable"].indexOf(n)===-1&&(s[n]=r[n])}),{data:Object.entries(s).length>0?s:void 0,timeout:r.timeout||t,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}var X={Read:1,Write:2,Any:3},U={Up:1,Down:2,Timeouted:3},_e=2*60*1e3;function ue(e,t=U.Up){return g(u({},e),{status:t,lastUpdate:Date.now()})}function Fe(e){return e.status===U.Up||Date.now()-e.lastUpdate>_e}function Be(e){return e.status===U.Timeouted&&Date.now()-e.lastUpdate<=_e}function le(e){return{protocol:e.protocol||"https",url:e.url,accept:e.accept||X.Any}}function ar(e,t){return Promise.all(t.map(r=>e.get(r,()=>Promise.resolve(ue(r))))).then(r=>{let s=r.filter(d=>Fe(d)),n=r.filter(d=>Be(d)),a=[...s,...n],o=a.length>0?a.map(d=>le(d)):t;return{getTimeout(d,y){return(n.length===0&&d===0?1:n.length+3+d)*y},statelessHosts:o}})}var or=({isTimedOut:e,status:t})=>!e&&~~t==0,ir=e=>{let t=e.status;return 
e.isTimedOut||or(e)||~~(t/100)!=2&&~~(t/100)!=4},cr=({status:e})=>~~(e/100)==2,ur=(e,t)=>ir(e)?t.onRetry(e):cr(e)?t.onSucess(e):t.onFail(e);function Qe(e,t,r,s){let n=[],a=$e(r,s),o=Le(e,s),d=r.method,y=r.method!==He.MethodEnum.Get?{}:u(u({},r.data),s.data),b=u(u(u({"x-algolia-agent":e.userAgent.value},e.queryParameters),y),s.queryParameters),f=0,p=(h,S)=>{let O=h.pop();if(O===void 0)throw Ve(de(n));let P={data:a,headers:o,method:d,url:Ge(O,r.path,b),connectTimeout:S(f,e.timeouts.connect),responseTimeout:S(f,s.timeout)},x=j=>{let T={request:P,response:j,host:O,triesLeft:h.length};return n.push(T),T},v={onSucess:j=>Ke(j),onRetry(j){let T=x(j);return j.isTimedOut&&f++,Promise.all([e.logger.info("Retryable failure",pe(T)),e.hostsCache.set(O,ue(O,j.isTimedOut?U.Timeouted:U.Down))]).then(()=>p(h,S))},onFail(j){throw x(j),ze(j,de(n))}};return e.requester.send(P).then(j=>ur(j,v))};return ar(e.hostsCache,t).then(h=>p([...h.statelessHosts].reverse(),h.getTimeout))}function lr(e){let{hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,hosts:y,queryParameters:b,headers:f}=e,p={hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,headers:f,queryParameters:b,hosts:y.map(h=>le(h)),read(h,S){let O=ce(S,p.timeouts.read),P=()=>Qe(p,p.hosts.filter(j=>(j.accept&X.Read)!=0),h,O);if((O.cacheable!==void 0?O.cacheable:h.cacheable)!==!0)return P();let v={request:h,mappedRequestOptions:O,transporter:{queryParameters:p.queryParameters,headers:p.headers}};return p.responsesCache.get(v,()=>p.requestsCache.get(v,()=>p.requestsCache.set(v,P()).then(j=>Promise.all([p.requestsCache.delete(v),j]),j=>Promise.all([p.requestsCache.delete(v),Promise.reject(j)])).then(([j,T])=>T)),{miss:j=>p.responsesCache.set(v,j)})},write(h,S){return Qe(p,p.hosts.filter(O=>(O.accept&X.Write)!=0),h,ce(S,p.timeouts.write))}};return p}function dr(e){let t={value:`Algolia for JavaScript (${e})`,add(r){let s=`; ${r.segment}${r.version!==void 0?` (${r.version})`:""}`;return t.value.indexOf(s)===-1&&(t.value=`${t.value}${s}`),t}};return t}function Ke(e){try{return JSON.parse(e.content)}catch(t){throw Je(t.message,e)}}function ze({content:e,status:t},r){let s=e;try{s=JSON.parse(e).message}catch(n){}return Xe(s,t,r)}function pr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}function Ge(e,t,r){let s=Ye(r),n=`${e.protocol}://${e.url}/${t.charAt(0)==="/"?t.substr(1):t}`;return s.length&&(n+=`?${s}`),n}function Ye(e){let t=r=>Object.prototype.toString.call(r)==="[object Object]"||Object.prototype.toString.call(r)==="[object Array]";return Object.keys(e).map(r=>pr("%s=%s",r,t(e[r])?JSON.stringify(e[r]):e[r])).join("&")}function $e(e,t){if(e.method===He.MethodEnum.Get||e.data===void 0&&t.data===void 0)return;let r=Array.isArray(e.data)?e.data:u(u({},e.data),t.data);return JSON.stringify(r)}function Le(e,t){let r=u(u({},e.headers),t.headers),s={};return Object.keys(r).forEach(n=>{let a=r[n];s[n.toLowerCase()]=a}),s}function de(e){return e.map(t=>pe(t))}function pe(e){let t=e.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return g(u({},e),{request:g(u({},e.request),{headers:u(u({},e.request.headers),t)})})}function Xe(e,t,r){return{name:"ApiError",message:e,status:t,transporterStackTrace:r}}function Je(e,t){return{name:"DeserializationError",message:e,response:t}}function Ve(e){return{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. 
If the error persists, contact support@algolia.com.",transporterStackTrace:e}}A.CallEnum=X;A.HostStatusEnum=U;A.createApiError=Xe;A.createDeserializationError=Je;A.createMappedRequestOptions=ce;A.createRetryError=Ve;A.createStatefulHost=ue;A.createStatelessHost=le;A.createTransporter=lr;A.createUserAgent=dr;A.deserializeFailure=ze;A.deserializeSuccess=Ke;A.isStatefulHostTimeouted=Be;A.isStatefulHostUp=Fe;A.serializeData=$e;A.serializeHeaders=Le;A.serializeQueryParameters=Ye;A.serializeUrl=Ge;A.stackFrameWithoutCredentials=pe;A.stackTraceWithoutCredentials=de});var K=I((en,et)=>{et.exports=Ze()});var tt=I(w=>{"use strict";Object.defineProperty(w,"__esModule",{value:!0});var N=F(),mr=K(),z=B(),hr=e=>{let t=e.region||"us",r=N.createAuth(N.AuthMode.WithinHeaders,e.appId,e.apiKey),s=mr.createTransporter(g(u({hosts:[{url:`analytics.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n=e.appId;return N.addMethods({appId:n,transporter:s},e.methods)},yr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:"2/abtests",data:t},r),gr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Delete,path:N.encode("2/abtests/%s",t)},r),fr=e=>(t,r)=>e.transporter.read({method:z.MethodEnum.Get,path:N.encode("2/abtests/%s",t)},r),br=e=>t=>e.transporter.read({method:z.MethodEnum.Get,path:"2/abtests"},t),Pr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:N.encode("2/abtests/%s/stop",t)},r);w.addABTest=yr;w.createAnalyticsClient=hr;w.deleteABTest=gr;w.getABTest=fr;w.getABTests=br;w.stopABTest=Pr});var st=I((rn,rt)=>{rt.exports=tt()});var at=I(G=>{"use strict";Object.defineProperty(G,"__esModule",{value:!0});var me=F(),jr=K(),nt=B(),Or=e=>{let t=e.region||"us",r=me.createAuth(me.AuthMode.WithinHeaders,e.appId,e.apiKey),s=jr.createTransporter(g(u({hosts:[{url:`recommendation.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)}));return me.addMethods({appId:e.appId,transporter:s},e.methods)},Ir=e=>t=>e.transporter.read({method:nt.MethodEnum.Get,path:"1/strategies/personalization"},t),Ar=e=>(t,r)=>e.transporter.write({method:nt.MethodEnum.Post,path:"1/strategies/personalization",data:t},r);G.createRecommendationClient=Or;G.getPersonalizationStrategy=Ir;G.setPersonalizationStrategy=Ar});var it=I((nn,ot)=>{ot.exports=at()});var jt=I(i=>{"use strict";Object.defineProperty(i,"__esModule",{value:!0});var l=F(),q=K(),m=B(),Sr=require("crypto");function Y(e){let t=r=>e.request(r).then(s=>{if(e.batch!==void 0&&e.batch(s.hits),!e.shouldStop(s))return s.cursor?t({cursor:s.cursor}):t({page:(r.page||0)+1})});return t({})}var Dr=e=>{let t=e.appId,r=l.createAuth(e.authMode!==void 0?e.authMode:l.AuthMode.WithinHeaders,t,e.apiKey),s=q.createTransporter(g(u({hosts:[{url:`${t}-dsn.algolia.net`,accept:q.CallEnum.Read},{url:`${t}.algolia.net`,accept:q.CallEnum.Write}].concat(l.shuffle([{url:`${t}-1.algolianet.com`},{url:`${t}-2.algolianet.com`},{url:`${t}-3.algolianet.com`}]))},e),{headers:u(g(u({},r.headers()),{"content-type":"application/x-www-form-urlencoded"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n={transporter:s,appId:t,addAlgoliaAgent(a,o){s.userAgent.add({segment:a,version:o})},clearCache(){return Promise.all([s.requestsCache.clear(),s.responsesCache.clear()]).then(()=>{})}};return l.addMethods(n,e.methods)};function 
ct(){return{name:"MissingObjectIDError",message:"All objects must have an unique objectID (like a primary key) to be valid. Algolia is also able to generate objectIDs automatically but *it's not recommended*. To do it, use the `{'autoGenerateObjectIDIfNotExist': true}` option."}}function ut(){return{name:"ObjectNotFoundError",message:"Object not found."}}function lt(){return{name:"ValidUntilNotFoundError",message:"ValidUntil not found in given secured api key."}}var Rr=e=>(t,r)=>{let d=r||{},{queryParameters:s}=d,n=R(d,["queryParameters"]),a=u({acl:t},s!==void 0?{queryParameters:s}:{}),o=(y,b)=>l.createRetryablePromise(f=>$(e)(y.key,b).catch(p=>{if(p.status!==404)throw p;return f()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/keys",data:a},n),o)},vr=e=>(t,r,s)=>{let n=q.createMappedRequestOptions(s);return n.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping",data:{cluster:r}},n)},xr=e=>(t,r,s)=>e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping/batch",data:{users:t,cluster:r}},s),Z=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"copy",destination:r}},s),n)},qr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Rules]})),Er=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Settings]})),Tr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Synonyms]})),Mr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).then(o).catch(d=>{if(d.status!==404)throw d}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/keys/%s",t)},r),s)},wr=()=>(e,t)=>{let r=q.serializeQueryParameters(t),s=Sr.createHmac("sha256",e).update(r).digest("hex");return Buffer.from(s+r).toString("base64")},$=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/keys/%s",t)},r),kr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/logs"},t),Cr=()=>e=>{let t=Buffer.from(e,"base64").toString("ascii"),r=/validUntil=(\d+)/,s=t.match(r);if(s===null)throw lt();return parseInt(s[1],10)-Math.round(new Date().getTime()/1e3)},Ur=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/top"},t),Nr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/clusters/mapping/%s",t)},r),Wr=e=>t=>{let n=t||{},{retrieveMappings:r}=n,s=R(n,["retrieveMappings"]);return r===!0&&(s.getClusters=!0),e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/pending"},s)},L=e=>(t,r={})=>{let s={transporter:e.transporter,appId:e.appId,indexName:t};return l.addMethods(s,r.methods)},Hr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/keys"},t),_r=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters"},t),Fr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/indexes"},t),Br=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping"},t),Kr=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"move",destination:r}},s),n)},zr=e=>(t,r)=>{let s=(n,a)=>Promise.all(Object.keys(n.taskID).map(o=>L(e)(o,{methods:{waitTask:D}}).waitTask(n.taskID[o],a)));return 
l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/indexes/*/batch",data:{requests:t}},r),s)},Gr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:t}},r),$r=e=>(t,r)=>{let s=t.map(n=>g(u({},n),{params:q.serializeQueryParameters(n.params||{})}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/queries",data:{requests:s},cacheable:!0},r)},Lr=e=>(t,r)=>Promise.all(t.map(s=>{let d=s.params,{facetName:n,facetQuery:a}=d,o=R(d,["facetName","facetQuery"]);return L(e)(s.indexName,{methods:{searchForFacetValues:dt}}).searchForFacetValues(n,a,u(u({},r),o))})),Vr=e=>(t,r)=>{let s=q.createMappedRequestOptions(r);return s.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Delete,path:"1/clusters/mapping"},s)},Qr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).catch(d=>{if(d.status!==404)throw d;return o()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/keys/%s/restore",t)},r),s)},Jr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/clusters/mapping/search",data:{query:t}},r),Xr=e=>(t,r)=>{let s=Object.assign({},r),f=r||{},{queryParameters:n}=f,a=R(f,["queryParameters"]),o=n?{queryParameters:n}:{},d=["acl","indexes","referers","restrictSources","queryParameters","description","maxQueriesPerIPPerHour","maxHitsPerQuery"],y=p=>Object.keys(s).filter(h=>d.indexOf(h)!==-1).every(h=>p[h]===s[h]),b=(p,h)=>l.createRetryablePromise(S=>$(e)(t,h).then(O=>y(O)?Promise.resolve():S()));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/keys/%s",t),data:o},a),b)},pt=e=>(t,r)=>{let s=(n,a)=>D(e)(n.taskID,a);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/batch",e.indexName),data:{requests:t}},r),s)},Yr=e=>t=>Y(g(u({},t),{shouldStop:r=>r.cursor===void 0,request:r=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/browse",e.indexName),data:r},t)})),Zr=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},es=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},te=e=>(t,r,s)=>{let y=s||{},{batchSize:n}=y,a=R(y,["batchSize"]),o={taskIDs:[],objectIDs:[]},d=(b=0)=>{let f=[],p;for(p=b;p({action:r,body:h})),a).then(h=>(o.objectIDs=o.objectIDs.concat(h.objectIDs),o.taskIDs.push(h.taskID),p++,d(p)))};return l.createWaitablePromise(d(),(b,f)=>Promise.all(b.taskIDs.map(p=>D(e)(p,f))))},ts=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/clear",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),rs=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ss=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return 
r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ns=e=>(t,r)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/deleteByQuery",e.indexName),data:t},r),(s,n)=>D(e)(s.taskID,n)),as=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),os=e=>(t,r)=>l.createWaitablePromise(yt(e)([t],r).then(s=>({taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),yt=e=>(t,r)=>{let s=t.map(n=>({objectID:n}));return te(e)(s,k.DeleteObject,r)},is=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},cs=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},us=e=>t=>gt(e)(t).then(()=>!0).catch(r=>{if(r.status!==404)throw r;return!1}),ls=e=>(t,r)=>{let y=r||{},{query:s,paginate:n}=y,a=R(y,["query","paginate"]),o=0,d=()=>ft(e)(s||"",g(u({},a),{page:o})).then(b=>{for(let[f,p]of Object.entries(b.hits))if(t(p))return{object:p,position:parseInt(f,10),page:o};if(o++,n===!1||o>=b.nbPages)throw ut();return d()});return d()},ds=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/%s",e.indexName,t)},r),ps=()=>(e,t)=>{for(let[r,s]of Object.entries(e.hits))if(s.objectID===t)return parseInt(r,10);return-1},ms=e=>(t,r)=>{let o=r||{},{attributesToRetrieve:s}=o,n=R(o,["attributesToRetrieve"]),a=t.map(d=>u({indexName:e.indexName,objectID:d},s?{attributesToRetrieve:s}:{}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:a}},n)},hs=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},r),gt=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/settings",e.indexName),data:{getVersion:2}},t),ys=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},r),bt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/task/%s",e.indexName,t.toString())},r),gs=e=>(t,r)=>l.createWaitablePromise(Pt(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),Pt=e=>(t,r)=>{let o=r||{},{createIfNotExists:s}=o,n=R(o,["createIfNotExists"]),a=s?k.PartialUpdateObject:k.PartialUpdateObjectNoCreate;return te(e)(t,a,n)},fs=e=>(t,r)=>{let O=r||{},{safe:s,autoGenerateObjectIDIfNotExist:n,batchSize:a}=O,o=R(O,["safe","autoGenerateObjectIDIfNotExist","batchSize"]),d=(P,x,v,j)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",P),data:{operation:v,destination:x}},j),(T,V)=>D(e)(T.taskID,V)),y=Math.random().toString(36).substring(7),b=`${e.indexName}_tmp_${y}`,f=he({appId:e.appId,transporter:e.transporter,indexName:b}),p=[],h=d(e.indexName,b,"copy",g(u({},o),{scope:["settings","synonyms","rules"]}));p.push(h);let S=(s?h.wait(o):h).then(()=>{let 
P=f(t,g(u({},o),{autoGenerateObjectIDIfNotExist:n,batchSize:a}));return p.push(P),s?P.wait(o):P}).then(()=>{let P=d(b,e.indexName,"move",o);return p.push(P),s?P.wait(o):P}).then(()=>Promise.all(p)).then(([P,x,v])=>({objectIDs:x.objectIDs,taskIDs:[P.taskID,...x.taskIDs,v.taskID]}));return l.createWaitablePromise(S,(P,x)=>Promise.all(p.map(v=>v.wait(x))))},bs=e=>(t,r)=>ye(e)(t,g(u({},r),{clearExistingRules:!0})),Ps=e=>(t,r)=>ge(e)(t,g(u({},r),{replaceExistingSynonyms:!0})),js=e=>(t,r)=>l.createWaitablePromise(he(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),he=e=>(t,r)=>{let o=r||{},{autoGenerateObjectIDIfNotExist:s}=o,n=R(o,["autoGenerateObjectIDIfNotExist"]),a=s?k.AddObject:k.UpdateObject;if(a===k.UpdateObject){for(let d of t)if(d.objectID===void 0)return l.createWaitablePromise(Promise.reject(ct()))}return te(e)(t,a,n)},Os=e=>(t,r)=>ye(e)([t],r),ye=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,clearExistingRules:n}=d,a=R(d,["forwardToReplicas","clearExistingRules"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.clearExistingRules=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},Is=e=>(t,r)=>ge(e)([t],r),ge=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,replaceExistingSynonyms:n}=d,a=R(d,["forwardToReplicas","replaceExistingSynonyms"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.replaceExistingSynonyms=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},ft=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/query",e.indexName),data:{query:t},cacheable:!0},r),dt=e=>(t,r,s)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/facets/%s/query",e.indexName,t),data:{facetQuery:r},cacheable:!0},s),mt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/search",e.indexName),data:{query:t}},r),ht=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/search",e.indexName),data:{query:t}},r),As=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/indexes/%s/settings",e.indexName),data:t},a),(d,y)=>D(e)(d.taskID,y))},D=e=>(t,r)=>l.createRetryablePromise(s=>bt(e)(t,r).then(n=>n.status!=="published"?s():void 
0)),Ss={AddObject:"addObject",Analytics:"analytics",Browser:"browse",DeleteIndex:"deleteIndex",DeleteObject:"deleteObject",EditSettings:"editSettings",ListIndexes:"listIndexes",Logs:"logs",Recommendation:"recommendation",Search:"search",SeeUnretrievableAttributes:"seeUnretrievableAttributes",Settings:"settings",Usage:"usage"},k={AddObject:"addObject",UpdateObject:"updateObject",PartialUpdateObject:"partialUpdateObject",PartialUpdateObjectNoCreate:"partialUpdateObjectNoCreate",DeleteObject:"deleteObject"},ee={Settings:"settings",Synonyms:"synonyms",Rules:"rules"},Ds={None:"none",StopIfEnoughMatches:"stopIfEnoughMatches"},Rs={Synonym:"synonym",OneWaySynonym:"oneWaySynonym",AltCorrection1:"altCorrection1",AltCorrection2:"altCorrection2",Placeholder:"placeholder"};i.ApiKeyACLEnum=Ss;i.BatchActionEnum=k;i.ScopeEnum=ee;i.StrategyEnum=Ds;i.SynonymEnum=Rs;i.addApiKey=Rr;i.assignUserID=vr;i.assignUserIDs=xr;i.batch=pt;i.browseObjects=Yr;i.browseRules=Zr;i.browseSynonyms=es;i.chunkedBatch=te;i.clearObjects=ts;i.clearRules=rs;i.clearSynonyms=ss;i.copyIndex=Z;i.copyRules=qr;i.copySettings=Er;i.copySynonyms=Tr;i.createBrowsablePromise=Y;i.createMissingObjectIDError=ct;i.createObjectNotFoundError=ut;i.createSearchClient=Dr;i.createValidUntilNotFoundError=lt;i.deleteApiKey=Mr;i.deleteBy=ns;i.deleteIndex=as;i.deleteObject=os;i.deleteObjects=yt;i.deleteRule=is;i.deleteSynonym=cs;i.exists=us;i.findObject=ls;i.generateSecuredApiKey=wr;i.getApiKey=$;i.getLogs=kr;i.getObject=ds;i.getObjectPosition=ps;i.getObjects=ms;i.getRule=hs;i.getSecuredApiKeyRemainingValidity=Cr;i.getSettings=gt;i.getSynonym=ys;i.getTask=bt;i.getTopUserIDs=Ur;i.getUserID=Nr;i.hasPendingMappings=Wr;i.initIndex=L;i.listApiKeys=Hr;i.listClusters=_r;i.listIndices=Fr;i.listUserIDs=Br;i.moveIndex=Kr;i.multipleBatch=zr;i.multipleGetObjects=Gr;i.multipleQueries=$r;i.multipleSearchForFacetValues=Lr;i.partialUpdateObject=gs;i.partialUpdateObjects=Pt;i.removeUserID=Vr;i.replaceAllObjects=fs;i.replaceAllRules=bs;i.replaceAllSynonyms=Ps;i.restoreApiKey=Qr;i.saveObject=js;i.saveObjects=he;i.saveRule=Os;i.saveRules=ye;i.saveSynonym=Is;i.saveSynonyms=ge;i.search=ft;i.searchForFacetValues=dt;i.searchRules=mt;i.searchSynonyms=ht;i.searchUserIDs=Jr;i.setSettings=As;i.updateApiKey=Xr;i.waitTask=D});var It=I((on,Ot)=>{Ot.exports=jt()});var At=I(re=>{"use strict";Object.defineProperty(re,"__esModule",{value:!0});function vs(){return{debug(e,t){return Promise.resolve()},info(e,t){return Promise.resolve()},error(e,t){return Promise.resolve()}}}var xs={Debug:1,Info:2,Error:3};re.LogLevelEnum=xs;re.createNullLogger=vs});var Dt=I((un,St)=>{St.exports=At()});var xt=I(fe=>{"use strict";Object.defineProperty(fe,"__esModule",{value:!0});var Rt=require("http"),vt=require("https"),qs=require("url");function Es(){let e={keepAlive:!0},t=new Rt.Agent(e),r=new vt.Agent(e);return{send(s){return new Promise(n=>{let a=qs.parse(s.url),o=a.query===null?a.pathname:`${a.pathname}?${a.query}`,d=u({agent:a.protocol==="https:"?r:t,hostname:a.hostname,path:o,method:s.method,headers:s.headers},a.port!==void 0?{port:a.port||""}:{}),y=(a.protocol==="https:"?vt:Rt).request(d,h=>{let S="";h.on("data",O=>S+=O),h.on("end",()=>{clearTimeout(f),clearTimeout(p),n({status:h.statusCode||0,content:S,isTimedOut:!1})})}),b=(h,S)=>setTimeout(()=>{y.abort(),n({status:0,content:S,isTimedOut:!0})},h*1e3),f=b(s.connectTimeout,"Connection 
timeout"),p;y.on("error",h=>{clearTimeout(f),clearTimeout(p),n({status:0,content:h.message,isTimedOut:!1})}),y.once("response",()=>{clearTimeout(f),p=b(s.responseTimeout,"Socket timeout")}),s.data!==void 0&&y.write(s.data),y.end()})},destroy(){return t.destroy(),r.destroy(),Promise.resolve()}}}fe.createNodeHttpRequester=Es});var Et=I((dn,qt)=>{qt.exports=xt()});var kt=I((pn,Tt)=>{"use strict";var Mt=Ee(),Ts=we(),W=st(),be=F(),Pe=it(),c=It(),Ms=Dt(),ws=Et(),ks=K();function wt(e,t,r){let s={appId:e,apiKey:t,timeouts:{connect:2,read:5,write:30},requester:ws.createNodeHttpRequester(),logger:Ms.createNullLogger(),responsesCache:Mt.createNullCache(),requestsCache:Mt.createNullCache(),hostsCache:Ts.createInMemoryCache(),userAgent:ks.createUserAgent(be.version).add({segment:"Node.js",version:process.versions.node})};return c.createSearchClient(g(u(u({},s),r),{methods:{search:c.multipleQueries,searchForFacetValues:c.multipleSearchForFacetValues,multipleBatch:c.multipleBatch,multipleGetObjects:c.multipleGetObjects,multipleQueries:c.multipleQueries,copyIndex:c.copyIndex,copySettings:c.copySettings,copyRules:c.copyRules,copySynonyms:c.copySynonyms,moveIndex:c.moveIndex,listIndices:c.listIndices,getLogs:c.getLogs,listClusters:c.listClusters,multipleSearchForFacetValues:c.multipleSearchForFacetValues,getApiKey:c.getApiKey,addApiKey:c.addApiKey,listApiKeys:c.listApiKeys,updateApiKey:c.updateApiKey,deleteApiKey:c.deleteApiKey,restoreApiKey:c.restoreApiKey,assignUserID:c.assignUserID,assignUserIDs:c.assignUserIDs,getUserID:c.getUserID,searchUserIDs:c.searchUserIDs,listUserIDs:c.listUserIDs,getTopUserIDs:c.getTopUserIDs,removeUserID:c.removeUserID,hasPendingMappings:c.hasPendingMappings,generateSecuredApiKey:c.generateSecuredApiKey,getSecuredApiKeyRemainingValidity:c.getSecuredApiKeyRemainingValidity,destroy:be.destroy,initIndex:n=>a=>c.initIndex(n)(a,{methods:{batch:c.batch,delete:c.deleteIndex,getObject:c.getObject,getObjects:c.getObjects,saveObject:c.saveObject,saveObjects:c.saveObjects,search:c.search,searchForFacetValues:c.searchForFacetValues,waitTask:c.waitTask,setSettings:c.setSettings,getSettings:c.getSettings,partialUpdateObject:c.partialUpdateObject,partialUpdateObjects:c.partialUpdateObjects,deleteObject:c.deleteObject,deleteObjects:c.deleteObjects,deleteBy:c.deleteBy,clearObjects:c.clearObjects,browseObjects:c.browseObjects,getObjectPosition:c.getObjectPosition,findObject:c.findObject,exists:c.exists,saveSynonym:c.saveSynonym,saveSynonyms:c.saveSynonyms,getSynonym:c.getSynonym,searchSynonyms:c.searchSynonyms,browseSynonyms:c.browseSynonyms,deleteSynonym:c.deleteSynonym,clearSynonyms:c.clearSynonyms,replaceAllObjects:c.replaceAllObjects,replaceAllSynonyms:c.replaceAllSynonyms,searchRules:c.searchRules,getRule:c.getRule,deleteRule:c.deleteRule,saveRule:c.saveRule,saveRules:c.saveRules,replaceAllRules:c.replaceAllRules,browseRules:c.browseRules,clearRules:c.clearRules}}),initAnalytics:()=>n=>W.createAnalyticsClient(g(u(u({},s),n),{methods:{addABTest:W.addABTest,getABTest:W.getABTest,getABTests:W.getABTests,stopABTest:W.stopABTest,deleteABTest:W.deleteABTest}})),initRecommendation:()=>n=>Pe.createRecommendationClient(g(u(u({},s),n),{methods:{getPersonalizationStrategy:Pe.getPersonalizationStrategy,setPersonalizationStrategy:Pe.setPersonalizationStrategy}}))}}))}wt.version=be.version;Tt.exports=wt});var Ut=I((mn,je)=>{var Ct=kt();je.exports=Ct;je.exports.default=Ct});var Ws={};Vt(Ws,{default:()=>Ks});var 
Oe=C(require("@yarnpkg/core")),E=C(require("@yarnpkg/core")),Ie=C(require("@yarnpkg/plugin-essentials")),Ht=C(require("semver"));var se=C(require("@yarnpkg/core")),Nt=C(Ut()),Cs="e8e1bd300d860104bb8c58453ffa1eb4",Us="OFCNCOG2CU",Wt=async(e,t)=>{var a;let r=se.structUtils.stringifyIdent(e),n=Ns(t).initIndex("npm-search");try{return((a=(await n.getObject(r,{attributesToRetrieve:["types"]})).types)==null?void 0:a.ts)==="definitely-typed"}catch(o){return!1}},Ns=e=>(0,Nt.default)(Us,Cs,{requester:{async send(r){try{let s=await se.httpUtils.request(r.url,r.data||null,{configuration:e,headers:r.headers});return{content:s.body,isTimedOut:!1,status:s.statusCode}}catch(s){return{content:s.response.body,isTimedOut:!1,status:s.response.statusCode}}}}});var _t=e=>e.scope?`${e.scope}__${e.name}`:`${e.name}`,Hs=async(e,t,r,s)=>{if(r.scope==="types")return;let{project:n}=e,{configuration:a}=n,o=a.makeResolver(),d={project:n,resolver:o,report:new E.ThrowReport};if(!await Wt(r,a))return;let b=_t(r),f=E.structUtils.parseRange(r.range).selector;if(!E.semverUtils.validRange(f)){let P=await o.getCandidates(r,new Map,d);f=E.structUtils.parseRange(P[0].reference).selector}let p=Ht.default.coerce(f);if(p===null)return;let h=`${Ie.suggestUtils.Modifier.CARET}${p.major}`,S=E.structUtils.makeDescriptor(E.structUtils.makeIdent("types",b),h),O=E.miscUtils.mapAndFind(n.workspaces,P=>{var T,V;let x=(T=P.manifest.dependencies.get(r.identHash))==null?void 0:T.descriptorHash,v=(V=P.manifest.devDependencies.get(r.identHash))==null?void 0:V.descriptorHash;if(x!==r.descriptorHash&&v!==r.descriptorHash)return E.miscUtils.mapAndFind.skip;let j=[];for(let Ae of Oe.Manifest.allDependencies){let Se=P.manifest[Ae].get(S.identHash);typeof Se!="undefined"&&j.push([Ae,Se])}return j.length===0?E.miscUtils.mapAndFind.skip:j});if(typeof O!="undefined")for(let[P,x]of O)e.manifest[P].set(x.identHash,x);else{try{if((await o.getCandidates(S,new Map,d)).length===0)return}catch{return}e.manifest[Ie.suggestUtils.Target.DEVELOPMENT].set(S.identHash,S)}},_s=async(e,t,r)=>{if(r.scope==="types")return;let s=_t(r),n=E.structUtils.makeIdent("types",s);for(let a of Oe.Manifest.allDependencies)typeof e.manifest[a].get(n.identHash)!="undefined"&&e.manifest[a].delete(n.identHash)},Fs=(e,t)=>{t.publishConfig&&t.publishConfig.typings&&(t.typings=t.publishConfig.typings),t.publishConfig&&t.publishConfig.types&&(t.types=t.publishConfig.types)},Bs={hooks:{afterWorkspaceDependencyAddition:Hs,afterWorkspaceDependencyRemoval:_s,beforeWorkspacePacking:Fs}},Ks=Bs;return Ws;})(); 7 | return plugin; 8 | } 9 | }; 10 | -------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | nodeLinker: node-modules 2 | 3 | plugins: 4 | - path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs 5 | spec: '@yarnpkg/plugin-interactive-tools' 6 | - path: .yarn/plugins/@yarnpkg/plugin-typescript.cjs 7 | spec: '@yarnpkg/plugin-typescript' 8 | - path: .yarn/plugins/@yarnpkg/plugin-production-install.cjs 9 | spec: 'https://gitlab.com/Larry1123/yarn-contrib/-/raw/master/packages/plugin-production-install/bundles/@yarnpkg/plugin-production-install.js' 10 | 11 | yarnPath: .yarn/releases/yarn-3.5.0.cjs 12 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:18-alpine AS builder 2 | 3 | WORKDIR /app 4 | 5 | COPY package.json yarn.lock 
.yarnrc.yml ./ 6 | COPY .yarn ./.yarn 7 | 8 | RUN yarn install --immutable 9 | 10 | COPY . . 11 | 12 | RUN yarn build 13 | 14 | RUN yarn prod-install out 15 | RUN mv build out/build 16 | 17 | RUN rm -rf out/.yarn/ 18 | 19 | ############################################################ 20 | # Final Image 21 | ############################################################ 22 | FROM node:18-alpine 23 | 24 | WORKDIR /app 25 | 26 | COPY --from=builder /app/out . 27 | 28 | ENV NODE_ENV production 29 | ENV LOG_FILES_ENABLE false 30 | 31 | CMD [ "node", "build/index.js" ] 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Jorge Barnaby 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OVH Availability Checker 2 | 3 | This is a NodeJS application that checks the availability of OVH / Kimsufi / SoYouStart servers (according to the `servers.json` config file) and, if they are in stock, can trigger: 4 | 5 | - An email through a configured SMTP server. 6 | - An SMS message through Twilio. 7 | - A PushBullet notification. 8 | - A Telegram notification. 9 | 10 | ## Configuration 11 | 12 | All the configuration is done through two files: 13 | 14 | - `.env` -> Where all the credentials and other config are stored (e.g. SMTP, Twilio API, Pushbullet, Telegram, etc.). Create your own `.env` file from `.env.sample`. 15 | - `servers.json` -> Where you can add the OVH / Kimsufi / SoYouStart servers you want to monitor. 16 | 17 | ## Run with NodeJS 18 | 19 | ```bash 20 | # Install all dependencies: 21 | yarn install 22 | # or just: 23 | yarn 24 | # Run application 25 | yarn start 26 | ``` 27 | 28 | ## Run with Docker 29 | 30 | The following command will build the Docker image, based on a NodeJS Alpine image (so it should have a fairly small footprint), and spin it up in the background: 31 | 32 | ```bash 33 | docker compose -f docker-compose.dev.yml up -d 34 | ``` 35 | 36 | You can also use the Docker image directly from GitHub Container Registry with the following command: 37 | 38 | ```bash 39 | docker compose up -d 40 | ``` 41 | 42 | You can easily use it out of the box and run it on a headless server, like a cheap VPS, using Docker.
It will run in the background, even after you have logged out of the server. 43 | 44 | ## License 45 | 46 | MIT License 47 | 48 | Copyright (c) 2023 Jorge Barnaby 49 | 50 | See [LICENSE](LICENSE) 51 | -------------------------------------------------------------------------------- /docker-compose.dev.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | 3 | services: 4 | node: 5 | build: . 6 | container_name: ovh-availability-checker 7 | volumes: 8 | - ./servers.json:/app/servers.json 9 | env_file: .env 10 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | 3 | services: 4 | node: 5 | image: ghcr.io/yorch/ovh-availability-checker 6 | container_name: ovh-availability-checker 7 | restart: unless-stopped 8 | volumes: 9 | - ./servers.json:/app/servers.json 10 | env_file: .env 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ovh-availability-checker", 3 | "version": "2.1.0", 4 | "description": "OVH Servers Availability Checker", 5 | "main": "src/index.ts", 6 | "type": "module", 7 | "scripts": { 8 | "start": "ts-node --esm src/index.ts", 9 | "scrape": "ts-node --esm src/scrape.ts", 10 | "build": "tsc", 11 | "lint": "xo", 12 | "format": "prettier --write .", 13 | "cleanup": "rm -rf build/" 14 | }, 15 | "author": "Jorge Barnaby", 55 | "engines": { "node": ">= 16.0.0" }, 56 | "packageManager": "yarn@3.5.0", 57 | "xo": { 58 | "prettier": true 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /servers.json: -------------------------------------------------------------------------------- 1 | { 2 | "2201sys02": { 3 | "enable": true, 4 | "name": "SYS-2-SAT-32", 5 | "price": "€24.99", 6 | "ram": "32GB DDR3 ECC 2133MHz", 7 | "cpu": "Intel Xeon D1540", 8 | "disk": "SoftRAID 4x2TB SATA", 9 | "network": "250Mbps", 10 | "datacenters": ["gra", "rbx"] 11 | }, 12 | "2209sys02": { 13 | "enable": true, 14 | "name": "SYS-2-SAT-32", 15 | "price": "€24.99", 16 | "ram": "32GB DDR3 ECC 2133MHz", 17 | "cpu": "Intel Xeon D1540", 18 | "disk": "SoftRAID 4x2TB SATA", 19 | "network": "250Mbps", 20 | "datacenters": ["gra", "rbx"] 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/actions/index.ts: -------------------------------------------------------------------------------- 1 | export { sendEmail } from './send-email.js'; 2 | export { sendPushbullet } from './send-pushbullet.js'; 3 | export { sendTelegram } from './send-telegram.js'; 4 | export { sendSms } from './send-sms.js'; 5 | -------------------------------------------------------------------------------- /src/actions/send-email.ts: -------------------------------------------------------------------------------- 1 | import nodemailer from 'nodemailer'; 2 | import { email } from '../config.js'; 3 | import type { Action } from '../types.js'; 4 | 5 | const { 6 | enable, 7 | from, 8 | smtp: { host, port, secure, user, pass }, 9 | subject, 10 | toEmail, 11 | } = email; 12 | 13 | export const sendEmail: Action = async ({ content, logger }) => { 14 | if (!enable) { 15 | return; 16 | } 17 | 18 | if (!host || !port || !user || !pass || !from.email || !from.name) { 19 | logger.warn(`Email not fully configured, can't send emails.`); 20 |
return; 21 | } 22 | 23 | const transporter = nodemailer.createTransport({ 24 | host, 25 | port, 26 | secure, 27 | auth: { 28 | user, 29 | pass, 30 | }, 31 | }); 32 | try { 33 | const info = await transporter.sendMail({ 34 | from: `"${from.name}" <${from.email}>`, 35 | to: toEmail, 36 | subject, 37 | text: content, 38 | html: content, 39 | }); 40 | const { messageId } = info; 41 | 42 | logger.info(`Email message sent: ${messageId}`); 43 | // Message sent: 44 | 45 | // Preview only available when sending through an Ethereal account 46 | // logger.info(`Preview URL: ${nodemailer.getTestMessageUrl(info)}`); 47 | // Preview URL: https://ethereal.email/message/WaQKMgKddxQDoou... 48 | } catch (error) { 49 | logger.error(error, 'Could not send the email'); 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /src/actions/send-pushbullet.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call */ 2 | import { pushbullet } from '../config.js'; 3 | import type { Action } from '../types.js'; 4 | 5 | const { apiToken, enable, deviceId, noteTitle } = pushbullet; 6 | 7 | export const sendPushbullet: Action = async ({ content, logger }) => { 8 | if (!enable) { 9 | return; 10 | } 11 | 12 | // @ts-expect-error - Typing missing 13 | // eslint-disable-next-line @typescript-eslint/naming-convention 14 | const PushBullet = (await import('pushbullet')).default; 15 | 16 | if (!apiToken || !deviceId || !noteTitle) { 17 | logger.warn(`Pushbullet not configured correctly, can't send messages`); 18 | return; 19 | } 20 | 21 | try { 22 | const pusher = new PushBullet(apiToken); 23 | const response = await pusher.note(deviceId, noteTitle, content); 24 | logger.info(`Pushbullet sent!`); 25 | logger.debug(`Pushbullet: ${String(response || '')}`); 26 | } catch (error) { 27 | logger.error(error, 'Could not send pushbullet'); 28 | } 29 | }; 30 | -------------------------------------------------------------------------------- /src/actions/send-sms.ts: -------------------------------------------------------------------------------- 1 | import { SmsSender } from 'simple-sms-sender'; 2 | import { sms } from '../config.js'; 3 | import type { Action } from '../types.js'; 4 | 5 | const { 6 | enable, 7 | twilio: { accountSid, from, secret, sid }, 8 | toNumber, 9 | } = sms; 10 | 11 | export const sendSms: Action = async ({ content, logger }) => { 12 | if (!enable) { 13 | return; 14 | } 15 | 16 | if (!accountSid || !from || !secret || !sid) { 17 | logger.warn('SMS not configured properly, no SMS can be sent'); 18 | return; 19 | } 20 | 21 | if (!toNumber) { 22 | logger.warn('No SMS sent as no number was given'); 23 | return; 24 | } 25 | 26 | const smsSender = new SmsSender({ 27 | accountSid, 28 | fromNumber: from, 29 | // @ts-expect-error - Fix typing and making sure `simple-sms-sender` can work with Pino 30 | logger, 31 | secret, 32 | sid, 33 | }); 34 | 35 | await smsSender.sendSms({ 36 | body: content, 37 | recipients: toNumber, 38 | }); 39 | }; 40 | -------------------------------------------------------------------------------- /src/actions/send-telegram.ts: -------------------------------------------------------------------------------- 1 | // https://github.com/yagop/node-telegram-bot-api/blob/master/src/telegram.js#L64-L84 2 | import TelegramBot from 'node-telegram-bot-api'; 3 | import { telegram } from '../config.js'; 4 | import type { Action } from '../types.js'; 5 |
6 | const { botToken, enable, chatId } = telegram; 7 | 8 | export const sendTelegram: Action = async ({ content, logger }) => { 9 | if (!enable) { 10 | return; 11 | } 12 | 13 | if (!botToken || !chatId) { 14 | logger.warn('Telegram not configured properly, no message can be sent'); 15 | return; 16 | } 17 | 18 | try { 19 | const bot = new TelegramBot(botToken); 20 | 21 | // eslint-disable-next-line @typescript-eslint/naming-convention 22 | const { message_id } = await bot.sendMessage(chatId, content); 23 | 24 | logger.info(`Telegram message sent: ${message_id}`); 25 | } catch (error) { 26 | logger.error(error, 'Could not send Telegram message'); 27 | } 28 | }; 29 | -------------------------------------------------------------------------------- /src/availability-checker.ts: -------------------------------------------------------------------------------- 1 | import got from 'got'; 2 | import type { Dictionary } from 'lodash'; 3 | import lodash from 'lodash'; 4 | import pick from 'lodash/fp/pick.js'; 5 | import * as cron from 'node-cron'; 6 | import type { Logger } from './logger.js'; 7 | import type { 8 | Action, 9 | HardwareAvailability, 10 | ServerAvailable, 11 | ServersAvailable, 12 | ServersToCheck, 13 | } from './types.js'; 14 | 15 | const { groupBy } = lodash; 16 | 17 | const unavailableStates = new Set(['unavailable', 'unknown']); 18 | 19 | export class AvailabilityChecker { 20 | actions: Action[]; 21 | logger: Logger; 22 | serversToCheck: ServersToCheck; 23 | url: string; 24 | scheduledTask: cron.ScheduledTask | undefined; 25 | 26 | constructor({ 27 | actions, 28 | logger, 29 | serversToCheck, 30 | url, 31 | }: { 32 | actions: Action[]; 33 | logger: Logger; 34 | serversToCheck: ServersToCheck; 35 | url: string; 36 | }) { 37 | this.actions = actions; 38 | this.logger = logger; 39 | this.serversToCheck = Object.fromEntries( 40 | Object.entries(serversToCheck).filter(([, { enable }]) => enable) 41 | ); 42 | this.url = url; 43 | 44 | this.logger.debug({ servers: this.serversToCheck }, 'Checking servers'); 45 | } 46 | 47 | async run() { 48 | const availability = await this._obtainAvailability(); 49 | const serversAvailable = this._processAvailabilityResponse( 50 | availability 51 | ).filter(({ availableIn }) => availableIn.length > 0); 52 | 53 | if (!serversAvailable || serversAvailable.length === 0) { 54 | this.logger.info('No available servers'); 55 | } else { 56 | const messages = this._buildMessages(serversAvailable); 57 | 58 | // this.logger.info(JSON.stringify(availability, null, 2)); 59 | // this.logger.info(JSON.stringify(serversAvailable, null, 2)); 60 | 61 | await this._runActions(messages); 62 | } 63 | } 64 | 65 | setupSchedule(schedule: string) { 66 | if (this.scheduledTask) { 67 | this.logger.error( 68 | 'There is already a scheduled task, not scheduling a new one' 69 | ); 70 | return; 71 | } 72 | 73 | this.scheduledTask = cron.schedule(schedule, async () => { 74 | await this.run(); 75 | }); 76 | } 77 | 78 | stopSchedule() { 79 | if (!this.scheduledTask) { 80 | this.logger.error('There is no scheduled task'); 81 | return; 82 | } 83 | 84 | this.scheduledTask.stop(); 85 | this.scheduledTask = undefined; 86 | } 87 | 88 | private async _runActions(messages: string[]) { 89 | try { 90 | // TODO: Consolidate multiple messages into same action (ie: same email) 91 | // TODO: Limit concurrency (ie: p-limit) 92 | await Promise.all( 93 | messages.flatMap((message) => { 94 | this.logger.info(`Processing message: ${message}`); 95 | return this.actions.map(this._buildRunAction(message));
96 | }) 97 | ); 98 | } catch (error) { 99 | this.logger.error(error, 'There was an error executing actions'); 100 | } 101 | } 102 | 103 | private _buildRunAction(message: string) { 104 | return async (action: Action) => { 105 | try { 106 | await action({ content: message, logger: this.logger }); 107 | } catch (error) { 108 | // TODO: Add more info about current action 109 | this.logger.error( 110 | error, 111 | `There was an error executing action ${action.name}` 112 | ); 113 | } 114 | }; 115 | } 116 | 117 | private async _obtainAvailability() { 118 | this.logger.info(`Obtaining availability from ${this.url}`); 119 | const start = Date.now(); 120 | const response = await got(this.url).json<HardwareAvailability[]>(); 121 | const timeTookSecs = (Date.now() - start) / 1000; 122 | this.logger.info( 123 | `Obtained ${response.length} entries in ${timeTookSecs} secs` 124 | ); 125 | 126 | return groupBy( 127 | response.filter(({ hardware }) => 128 | Object.keys(this.serversToCheck).includes(hardware) 129 | ), 130 | 'hardware' 131 | ); 132 | } 133 | 134 | private _processAvailabilityResponse( 135 | response: Dictionary<HardwareAvailability[]> 136 | ): ServersAvailable { 137 | return Object.entries(this.serversToCheck).map( 138 | ([hardwareCode, { datacenters, ...rest }]) => ({ 139 | ...rest, 140 | ...this._processAvailability(response[hardwareCode]), 141 | code: hardwareCode, 142 | }) 143 | ); 144 | } 145 | 146 | private _processAvailability(availabilityPerCode?: HardwareAvailability[]) { 147 | const availability = (availabilityPerCode ?? []).map( 148 | pick(['datacenters', 'region']) 149 | ); 150 | 151 | const datacentersAvailability = availability.flatMap( 152 | ({ datacenters }) => datacenters 153 | ); 154 | 155 | return { 156 | availability, 157 | availableIn: datacentersAvailability 158 | .filter(({ availability }) => !unavailableStates.has(availability)) 159 | .map(({ datacenter }) => datacenter), 160 | datacentersAvailability, 161 | }; 162 | } 163 | 164 | private _buildMessages(serversAvailable: ServersAvailable) { 165 | return serversAvailable.flatMap( 166 | ({ availableIn, datacentersAvailability, name, cpu, ram, disk, price }) => 167 | availableIn.map((dc) => 168 | this._buildMessage({ 169 | name, 170 | cpu, 171 | dc, 172 | ram, 173 | disk, 174 | price, 175 | datacentersAvailability, 176 | }) 177 | ) 178 | ); 179 | } 180 | 181 | private _buildMessage({ 182 | name = '', 183 | dc = '', 184 | cpu = '', 185 | ram = '', 186 | disk = '', 187 | price = '', 188 | datacentersAvailability, 189 | }: Partial<ServerAvailable> & { 190 | dc: string; 191 | }) { 192 | const { availability = '' } = 193 | datacentersAvailability?.find(({ datacenter }) => datacenter === dc) ??
194 | {}; 195 | 196 | return `${name} (DC: ${dc}): ${cpu}, ${ram}, ${disk} ==> ${price} (availability: ${availability})`; 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable n/prefer-global/process */ 2 | import dotEnv from 'dotenv'; 3 | import envVar from 'env-var'; 4 | 5 | dotEnv.config(); 6 | 7 | const { from, logger } = envVar; 8 | 9 | const env = from(process.env, {}, logger); 10 | 11 | export const availabilityUrl = env 12 | .get('AVAILABILITY_URL') 13 | .required() 14 | .asString(); 15 | 16 | export const inputFile = env.get('INPUT_FILE').required().asString(); 17 | 18 | export const cronSchedule = env.get('CRON_SCHEDULE').asString(); 19 | 20 | export const logFilesEnable = env 21 | .get('LOG_FILES_ENABLE') 22 | .default('true') 23 | .asBool(); 24 | 25 | export const logsDirectory = env.get('LOGS_DIR').default('./logs').asString(); 26 | 27 | export const logLevel = env.get('LOG_LEVEL').default('info').asString(); 28 | 29 | export const nodeEnv = env.get('NODE_ENV').default('development').asString(); 30 | 31 | export const isProduction = nodeEnv === 'production'; 32 | 33 | export const datasetLogger = { 34 | enable: env.get('DATASET_LOGGER_ENABLE').default('false').asBool(), 35 | apiKey: env.get('DATASET_LOGGER_API_KEY').asString(), 36 | serverUrl: env.get('DATASET_LOGGER_SERVER_URL').asString(), 37 | }; 38 | 39 | export const email = { 40 | enable: env.get('EMAIL_ENABLE').default('false').asBool(), 41 | smtp: { 42 | host: env.get('SMTP_HOST').asString(), 43 | port: env.get('SMTP_PORT').asPortNumber(), 44 | // `true` for 465, `false` for other ports 45 | secure: env.get('SMTP_IS_SECURE').asBool(), 46 | user: env.get('SMTP_USER').asString(), 47 | pass: env.get('SMTP_PASS').asString(), 48 | }, 49 | from: { 50 | email: env.get('EMAIL_FROM_ADDRESS').asString(), 51 | name: env.get('EMAIL_FROM_NAME').asString(), 52 | }, 53 | subject: env.get('EMAIL_SUBJECT').asString(), 54 | // Can have multiple emails separated by comma 55 | toEmail: env.get('EMAIL_TO_ADDRESS').asArray(), 56 | }; 57 | 58 | export const sms = { 59 | enable: env.get('SMS_ENABLE').default('false').asBool(), 60 | toNumber: env.get('SMS_TO_NUMBER').asArray(), 61 | twilio: { 62 | accountSid: env.get('TWILIO_ACCOUNT_SID').asString(), 63 | from: env.get('TWILIO_FROM').asString(), 64 | messagingServiceSid: env.get('TWILIO_MESSAGING_SERVICE_SID').asString(), 65 | secret: env.get('TWILIO_SECRET').asString(), 66 | sid: env.get('TWILIO_SID').asString(), 67 | }, 68 | }; 69 | 70 | export const pushbullet = { 71 | enable: env.get('PUSHBULLET_ENABLE').default('false').asBool(), 72 | apiToken: env.get('PUSHBULLET_API_TOKEN').asString(), 73 | deviceId: env.get('PUSHBULLET_DEVICE_ID').asString(), 74 | noteTitle: env.get('PUSHBULLET_NODE_TITLE').asString(), 75 | }; 76 | 77 | export const telegram = { 78 | enable: env.get('TELEGRAM_ENABLE').default('false').asBool(), 79 | botToken: env.get('TELEGRAM_BOT_TOKEN').asString(), 80 | chatId: env.get('TELEGRAM_CHAT_ID').asString(), 81 | }; 82 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable n/prefer-global/process */ 2 | import { init } from './init.js'; 3 | import { logger } from './logger.js'; 4 | 5 | process.on('SIGTERM', () => { 6 | logger.info('Got a SIGTERM, exiting'); 7 | 
process.exit(1); 8 | }); 9 | 10 | try { 11 | await init(); 12 | } catch (error) { 13 | logger.error(error, 'There was an unexpected error executing the program'); 14 | } 15 | -------------------------------------------------------------------------------- /src/init.ts: -------------------------------------------------------------------------------- 1 | import { 2 | sendEmail, 3 | sendPushbullet, 4 | sendSms, 5 | sendTelegram, 6 | } from './actions/index.js'; 7 | import { AvailabilityChecker } from './availability-checker.js'; 8 | import { availabilityUrl, cronSchedule } from './config.js'; 9 | import { logger } from './logger.js'; 10 | import { readInputFile } from './read-input-file.js'; 11 | 12 | export const init = async () => { 13 | if (!availabilityUrl) { 14 | logger.warn('No availabilityUrl configured, exiting'); 15 | return; 16 | } 17 | 18 | const serversToCheck = await readInputFile(); 19 | 20 | const availabilityChecker = new AvailabilityChecker({ 21 | actions: [sendEmail, sendPushbullet, sendSms, sendTelegram], 22 | logger, 23 | serversToCheck, 24 | url: availabilityUrl, 25 | }); 26 | 27 | if (cronSchedule) { 28 | availabilityChecker.setupSchedule(cronSchedule); 29 | } else { 30 | // Run once 31 | await availabilityChecker.run(); 32 | } 33 | }; 34 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | import os from 'node:os'; 2 | import path from 'node:path'; 3 | import pino from 'pino'; 4 | import type { TransportTargetOptions } from 'pino'; 5 | import { 6 | datasetLogger, 7 | isProduction, 8 | logFilesEnable, 9 | logLevel, 10 | logsDirectory, 11 | } from './config.js'; 12 | import { exists } from './utils.js'; 13 | 14 | const stdOutTarget: TransportTargetOptions = { 15 | target: 'pino/file', 16 | level: logLevel, 17 | options: {}, 18 | }; 19 | 20 | const fileErrorTarget: TransportTargetOptions = { 21 | target: 'pino/file', 22 | level: 'error', 23 | options: { 24 | destination: path.join(logsDirectory, 'error.log'), 25 | mkdir: true, 26 | }, 27 | }; 28 | 29 | const fileTarget: TransportTargetOptions = { 30 | target: 'pino/file', 31 | level: logLevel, 32 | options: { 33 | destination: path.join(logsDirectory, 'combined.log'), 34 | mkdir: true, 35 | }, 36 | }; 37 | 38 | const pinoPrettyTarget = { 39 | target: 'pino-pretty', 40 | level: logLevel, 41 | options: { 42 | colorize: true, 43 | ignore: 'pid,hostname', 44 | // TranslateTime: 'yyyy/mm/dd HH:MM:ss Z', 45 | translateTime: 'HH:MM:ss Z', 46 | }, 47 | }; 48 | 49 | const dataSetTarget = datasetLogger.enable 50 | ? { 51 | target: 'pino-dataset-transport', 52 | level: logLevel, 53 | options: { 54 | loggerOptions: { 55 | apiKey: datasetLogger.apiKey, 56 | ...(datasetLogger.serverUrl 57 | ? { serverUrl: datasetLogger.serverUrl } 58 | : {}), 59 | sessionInfo: { 60 | // TODO: Add build time, version and SHA 61 | serverHost: os.hostname(), 62 | logfile: 'app.log', 63 | osHomedir: os.homedir(), 64 | osPlatform: os.platform(), 65 | osRelease: os.release(), 66 | osType: os.type(), 67 | osVersion: os.version(), 68 | userInfo: os.userInfo(), 69 | }, 70 | shouldFlattenAttributes: true, 71 | }, 72 | }, 73 | } 74 | : undefined; 75 | 76 | export const logger = pino({ 77 | transport: { 78 | targets: [ 79 | // To make sure we always print to console regardless if production or not 80 | isProduction ? stdOutTarget : pinoPrettyTarget, 81 | dataSetTarget, 82 | ...(logFilesEnable ? 
[fileErrorTarget, fileTarget] : []), 83 | ].filter(exists), // eslint-disable-line unicorn/no-array-callback-reference 84 | }, 85 | level: logLevel, 86 | }); 87 | 88 | export const createChildLogger = (module: string) => logger.child({ module }); 89 | 90 | export type Logger = pino.Logger; 91 | -------------------------------------------------------------------------------- /src/read-input-file.ts: -------------------------------------------------------------------------------- 1 | import { loadJsonFile } from 'load-json-file'; 2 | import { inputFile } from './config.js'; 3 | import type { ServersToCheck } from './types.js'; 4 | 5 | export const readInputFile = async () => { 6 | // TODO: Check if file exists 7 | const serversToCheck = await loadJsonFile<ServersToCheck>(inputFile); 8 | 9 | return serversToCheck; 10 | }; 11 | -------------------------------------------------------------------------------- /src/scrape.ts: -------------------------------------------------------------------------------- 1 | import jsonfile from 'jsonfile'; 2 | import { logger } from './logger.js'; 3 | import { scrapeServers } from './scrape/scrape-servers.js'; 4 | 5 | const filename = 'scrapped-servers.json'; 6 | 7 | const { servers, status } = await scrapeServers(); 8 | 9 | logger.info(`Status Code: ${status}`); 10 | logger.info(`Obtained ${servers.length} servers and saved to ${filename}`); 11 | 12 | await jsonfile.writeFile(filename, servers, { spaces: 2 }); 13 | -------------------------------------------------------------------------------- /src/scrape/_utils.ts: -------------------------------------------------------------------------------- 1 | export const cleanupText = (txt: string) => 2 | txt 3 | .replace(/\r\n|\r|\n/g, '') 4 | .replace(/ {2}/g, '') 5 | .replace( 6 | // eslint-disable-next-line no-irregular-whitespace 7 | /[\u00A0\u1680​\u180E\u2000-\u2009\u200A\u200B\u202F\u205F\u3000]/g, 8 | ' ' 9 | ) 10 | // .replace(/U+00a0/, ' '); 11 | .trim(); 12 | -------------------------------------------------------------------------------- /src/scrape/index.ts: -------------------------------------------------------------------------------- 1 | export * from './scrape-servers.js'; 2 | -------------------------------------------------------------------------------- /src/scrape/scrape-servers.ts: -------------------------------------------------------------------------------- 1 | import scrapeIt from 'scrape-it'; 2 | import type { Server } from '../types.js'; 3 | import { cleanupText } from './_utils.js'; 4 | 5 | export const scrapeServers = async () => { 6 | const url = 'https://eco.ovhcloud.com/en/#filterType=range_element'; 7 | 8 | const { data, status } = await scrapeIt<{ servers: Server[] }>(url, { 9 | servers: { 10 | listItem: '[data-product=data-product]', 11 | data: { 12 | id: { 13 | attr: 'data-product-id', 14 | }, 15 | model: { 16 | selector: '[data-product-specs=""] > div', 17 | attr: 'data-plancode', 18 | }, 19 | provider: { 20 | selector: '.ods-badge--default', 21 | }, 22 | cpu: { 23 | selector: '[data-product-specs=""] > div > div:nth-child(1)', 24 | convert: (x: string) => 25 | cleanupText(x).replace('CPU:', '').split(' - ')[0]?.trim(), 26 | }, 27 | cpuCores: { 28 | selector: '[data-product-specs=""] > div > div:nth-child(1)', 29 | convert: (x: string) => 30 | cleanupText(x).replace('CPU:', '').split(' - ')[1]?.trim(), 31 | }, 32 | cpuFreq: { 33 | selector: '[data-product-specs=""] > div > div:nth-child(1)', 34 | convert: (x: string) => 35 | cleanupText(x).replace('CPU:', '').split(' - ')[2]?.trim(), 36 | },
37 | ram: { 38 | selector: '[data-product-specs=""] > div > div:nth-child(2)', 39 | convert: (x: string) => cleanupText(x).replace('Memory:', '').trim(), 40 | }, 41 | disk: { 42 | selector: '[data-product-specs=""] > div > div:nth-child(3)', 43 | convert: (x: string) => cleanupText(x).replace('Storage:', '').trim(), 44 | }, 45 | network: { 46 | selector: '[data-product-specs=""] > div > div:nth-child(4)', 47 | convert: (x: string) => 48 | cleanupText(x).replace('Public bandwidth:', '').trim(), 49 | }, 50 | priceMonthly: { 51 | selector: '.ods-product-information-preview__price-1 .price-value', 52 | eq: 1, 53 | convert: (x: string) => Number(x.replace('$', '')), 54 | }, 55 | installationFee: { 56 | selector: '.ods-product-information-preview__fees .price-value', 57 | convert: (x: string) => Number(x.replace('$', '')), 58 | }, 59 | }, 60 | }, 61 | }); 62 | 63 | return { 64 | servers: data.servers, 65 | status, 66 | }; 67 | }; 68 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import type { Logger } from './logger.js'; 2 | 3 | export type Action = ({ 4 | content, 5 | logger, 6 | }: { 7 | content: string; 8 | logger: Logger; 9 | }) => Promise<void> | void; 10 | 11 | export type Region = 'apac' | 'europe' | 'northAmerica'; 12 | 13 | export enum Datacenter { 14 | // North America 15 | BHS = 'bhs', 16 | HIL = 'hil', 17 | VIN = 'vin', 18 | // Europe 19 | FRA = 'fra', 20 | GRA = 'gra', 21 | LON = 'lon', 22 | RBX = 'rbx', 23 | SBG = 'sbg', 24 | WAW = 'waw', 25 | // APAC 26 | SGP = 'sgp', 27 | SYD = 'syd', 28 | } 29 | 30 | export type DatacenterAvailability = { 31 | availability: string; 32 | datacenter: Datacenter; 33 | }; 34 | 35 | export type HardwareAvailability = { 36 | region: Region; 37 | hardware: string; 38 | datacenters: DatacenterAvailability[]; 39 | }; 40 | 41 | export type Server = { 42 | id: string; 43 | model: string; 44 | provider: string; 45 | cpu: string; 46 | cpuCores: string; 47 | cpuFreq: string; 48 | ram: string; 49 | disk: string; 50 | network: string; 51 | priceMonthly: number; 52 | installationFee: number; 53 | }; 54 | 55 | export type ServerToCheck = { 56 | enable: boolean; 57 | name: string; 58 | price: string; 59 | ram: string; 60 | cpu: string; 61 | disk: string; 62 | network: string; 63 | // Datacenters: Datacenter[]; 64 | datacenters: string[]; 65 | }; 66 | 67 | export type ServersToCheck = Record<string, ServerToCheck>; 68 | 69 | export type ServerAvailable = Pick< 70 | ServerToCheck, 71 | 'name' | 'cpu' | 'ram' | 'disk' | 'price' 72 | > & { 73 | availability: Array<Pick<HardwareAvailability, 'datacenters' | 'region'>>; 74 | availableIn: Datacenter[]; 75 | code: string; 76 | datacentersAvailability: DatacenterAvailability[]; 77 | }; 78 | 79 | export type ServersAvailable = ServerAvailable[]; 80 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This function is helpful to filter out elements from an array (like when using `.map().filter(exists).map()`) 3 | * and have TS infer the correct element type for the output of the filter (a short usage sketch is appended after the listings below) 4 | * 5 | * More info and original implementation: https://dev.to/icyjoseph/filtering-in-typescript-2mbc 6 | */ 7 | // eslint-disable-next-line @typescript-eslint/ban-types 8 | export function exists<T>(value: T | null | undefined): value is T { 9 | return value === (value ??
!value); 10 | } 11 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "extends": "@tsconfig/node18-strictest/tsconfig.json", 4 | "compilerOptions": { 5 | "module": "es2022", 6 | "outDir": "./build", 7 | "noImplicitAny": true /* Enable error reporting for expressions and declarations with an implied `any` type. */, 8 | "strictNullChecks": true /* When type checking, take into account `null` and `undefined`. */, 9 | "strictFunctionTypes": true /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */, 10 | "strictBindCallApply": true /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */, 11 | "strictPropertyInitialization": true /* Check for class properties that are declared but not set in the constructor. */, 12 | "noImplicitThis": true /* Enable error reporting when `this` is given the type `any`. */ 13 | // "useUnknownInCatchVariables": true /* Type catch clause variables as 'unknown' instead of 'any'. */, 14 | // "alwaysStrict": true /* Ensure 'use strict' is always emitted. */, 15 | // "noUnusedLocals": true /* Enable error reporting when local variables aren't read. */, 16 | // "noUnusedParameters": true /* Raise an error when a function parameter isn't read */, 17 | // "exactOptionalPropertyTypes": true /* Interpret optional property types as written, rather than adding 'undefined'. */, 18 | // "noImplicitReturns": true /* Enable error reporting for codepaths that do not explicitly return in a function. */, 19 | // "noFallthroughCasesInSwitch": true /* Enable error reporting for fallthrough cases in switch statements. */, 20 | // "noUncheckedIndexedAccess": true /* Include 'undefined' in index signature results */, 21 | // "noImplicitOverride": true /* Ensure overriding members in derived classes are marked with an override modifier. */, 22 | // "noPropertyAccessFromIndexSignature": true /* Enforces using indexed accessors for keys declared using an indexed type */, 23 | // "allowUnusedLabels": true /* Disable error reporting for unused labels. */, 24 | // "allowUnreachableCode": true /* Disable error reporting for unreachable code. */ 25 | }, 26 | "include": ["src/**/*"] 27 | } 28 | --------------------------------------------------------------------------------
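
A minimal usage sketch for the `exists` type guard in src/utils.ts above (illustration only, not part of the repository; the sample array and values are hypothetical, and it assumes the snippet lives next to src/utils.ts):

import { exists } from './utils.js';

// A list with gaps, e.g. the result of an earlier `.map()` that may return undefined.
const maybeCodes: Array<string | undefined> = ['22sk010', undefined, '22sk011'];

// `.filter(exists)` removes the gaps at runtime and, because `exists` is a type
// guard, TypeScript narrows the element type from `string | undefined` to `string`.
const codes: string[] = maybeCodes.filter(exists);

console.log(codes); // ['22sk010', '22sk011']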