Merge pull request #2 from digitalocean-labs/linting-fixes

fix linting issues and rename files for clarity
Dillon LeDoux 2025-09-08 20:12:34 -05:00 committed by GitHub
commit aeb95c3b24
8 changed files with 529 additions and 213 deletions

View file

@@ -4,9 +4,9 @@ import {
INodeProperties,
} from 'n8n-workflow';
export class DigitalOceanServerlessInference implements ICredentialType {
name = 'digitalOceanServerlessInference';
displayName = 'DigitalOcean Gradient™ AI Platform';
export class DigitalOceanServerlessInferenceApi implements ICredentialType {
name = 'digitalOceanServerlessInferenceApi';
displayName = 'DigitalOcean Gradient™ AI Platform API';
documentationUrl = 'https://docs.digitalocean.com/products/gradient-ai-platform/how-to/use-serverless-inference/';
properties: INodeProperties[] = [
{
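
For context, a minimal sketch of the renamed credential class as it plausibly looks after this commit. The class name, `name`, `displayName`, and `documentationUrl` come straight from the diff above; the single API-key property is an assumption, since the `properties` array is truncated in this view.

```typescript
import {
	ICredentialType,
	INodeProperties,
} from 'n8n-workflow';

export class DigitalOceanServerlessInferenceApi implements ICredentialType {
	name = 'digitalOceanServerlessInferenceApi';
	displayName = 'DigitalOcean Gradient™ AI Platform API';
	documentationUrl =
		'https://docs.digitalocean.com/products/gradient-ai-platform/how-to/use-serverless-inference/';
	properties: INodeProperties[] = [
		{
			displayName: 'API Key', // assumed field; the real property list is cut off above
			name: 'apiKey',
			type: 'string',
			typeOptions: { password: true },
			default: '',
		},
	];
}
```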

View file

(Binary image file changed; 5.7 KiB before and after.)

View file

(Binary image file changed; 2.7 KiB before and after.)

View file

@@ -21,7 +21,7 @@ export class ServerlessInference implements INodeType {
outputs: [NodeConnectionType.Main],
credentials: [
{
name: 'digitalOceanServerlessInference',
name: 'digitalOceanServerlessInferenceApi',
required: true,
},
],
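
The string registered here must match the `name` on the credential class exactly, which is why this file changes in lockstep with the credential rename above. A minimal sketch of that relationship in plain TypeScript (not the full node description):

```typescript
// ICredentialType.name (from the credentials file above)
const credentialTypeName = 'digitalOceanServerlessInferenceApi';

// INodeTypeDescription.credentials entry (this file) — the `name` here is how
// n8n ties the node to the credential type, so the two strings must be identical.
const credentials = [
	{
		name: credentialTypeName,
		required: true,
	},
];

console.log(credentials[0].name === 'digitalOceanServerlessInferenceApi'); // true
```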

View file

@@ -17,7 +17,7 @@ export const textOperations: INodeProperties[] = [
{
name: 'Complete',
value: 'complete',
action: 'Create a Text Completion',
action: 'Create a text completion',
description: 'Create one or more completions for a given text',
routing: {
request: {
@@ -37,8 +37,7 @@ const completeOperations: INodeProperties[] = [
displayName: 'Model',
name: 'model',
type: 'options',
description:
'The model which will generate the completion. <a href="https://docs.digitalocean.com/products/gradient-ai-platform/details/models/">Learn more</a>',
description: 'The model which will generate the completion. <a href="https://docs.digitalocean.com/products/gradient-ai-platform/details/models/">Learn more</a>.',
displayOptions: {
show: {
operation: ['complete'],
@@ -84,7 +83,7 @@ const completeOperations: INodeProperties[] = [
property: 'model',
},
},
default: 'openai-gpt-oss-120b',
default: '',
},
{
displayName: 'Input Type',
@@ -230,6 +229,69 @@ const sharedOperations: INodeProperties[] = [
},
},
options: [
{
displayName: 'Frequency Penalty',
name: 'frequencyPenalty',
description: 'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far.',
type: 'number',
default: undefined,
typeOptions: {
maxValue: 2,
minValue: -2,
numberPrecision: 2,
},
routing: {
send: {
type: 'body',
property: 'frequency_penalty',
},
},
},
{
displayName: 'Logit Bias',
name: 'logitBias',
description: 'Modify the likelihood of specified tokens appearing in the completion (JSON object mapping token IDs to bias values)',
type: 'string',
default: '',
placeholder: '{"50256": -100}',
routing: {
send: {
type: 'body',
property: 'logit_bias',
value: '={{$parameter.logitBias ? JSON.parse($parameter.logitBias) : undefined}}',
},
},
},
{
displayName: 'Logprobs',
name: 'logprobs',
description: 'Whether to return log probabilities of the output tokens',
type: 'boolean',
default: false,
routing: {
send: {
type: 'body',
property: 'logprobs',
},
},
},
{
displayName: 'Max Completion Tokens',
name: 'maxCompletionTokens',
description:
'The maximum number of tokens that can be generated in the chat completion. This value can be used to control costs for text generated via API.',
type: 'number',
default: undefined,
typeOptions: {
minValue: 1,
},
routing: {
send: {
type: 'body',
property: 'max_completion_tokens',
},
},
},
{
displayName: 'Maximum Number of Tokens',
name: 'maxTokens',
@@ -254,53 +316,17 @@ const sharedOperations: INodeProperties[] = [
},
},
{
displayName: 'Max Completion Tokens',
name: 'maxCompletionTokens',
description:
'The maximum number of tokens that can be generated in the chat completion. This value can be used to control costs for text generated via API.',
type: 'number',
default: undefined,
typeOptions: {
minValue: 1,
},
displayName: 'Metadata',
name: 'metadata',
description: 'Developer-defined metadata to attach to the completion (JSON object)',
type: 'string',
default: '',
placeholder: '{"purpose": "testing"}',
routing: {
send: {
type: 'body',
property: 'max_completion_tokens',
},
},
},
{
displayName: 'Temperature',
name: 'temperature',
default: 0.7,
typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 2 },
description:
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
type: 'number',
routing: {
send: {
type: 'body',
property: 'temperature',
},
},
},
{
displayName: 'Top P',
name: 'topP',
description:
'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass.',
type: 'number',
default: undefined,
typeOptions: {
maxValue: 1,
minValue: 0,
numberPrecision: 3,
},
routing: {
send: {
type: 'body',
property: 'top_p',
property: 'metadata',
value: '={{$parameter.metadata ? JSON.parse($parameter.metadata) : undefined}}',
},
},
},
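
The Logit Bias and Metadata options above accept a JSON string and convert it in their routing expressions before the value is sent. A minimal sketch of that expression logic in plain TypeScript (it mirrors the `={{ ... }}` expressions shown in the diff, not n8n's expression engine itself):

```typescript
// Example values as a user would type them into the two fields.
const logitBiasInput = '{"50256": -100}';
const metadataInput = '{"purpose": "testing"}';

// Empty input resolves to undefined, so the body property is simply omitted.
const logit_bias: Record<string, number> | undefined =
	logitBiasInput ? JSON.parse(logitBiasInput) : undefined;
const metadata: Record<string, string> | undefined =
	metadataInput ? JSON.parse(metadataInput) : undefined;

console.log({ logit_bias, metadata });
// { logit_bias: { '50256': -100 }, metadata: { purpose: 'testing' } }
```
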
@@ -321,10 +347,43 @@ const sharedOperations: INodeProperties[] = [
},
},
},
{
displayName: 'Presence Penalty',
name: 'presencePenalty',
description: 'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far.',
type: 'number',
default: undefined,
typeOptions: {
maxValue: 2,
minValue: -2,
numberPrecision: 2,
},
routing: {
send: {
type: 'body',
property: 'presence_penalty',
},
},
},
{
displayName: 'Stop Sequences',
name: 'stop',
description: 'Up to 4 sequences where the API will stop generating further tokens',
type: 'string',
default: '',
placeholder: 'e.g. \\n, Human:, AI:',
routing: {
send: {
type: 'body',
property: 'stop',
value: '={{$parameter.stop ? $parameter.stop.split(",").map(s => s.trim()) : undefined}}',
},
},
},
{
displayName: 'Stream',
name: 'stream',
description: 'If set, partial message deltas will be sent, like in ChatGPT',
description: 'Whether partial message deltas will be sent, like in ChatGPT',
type: 'boolean',
default: false,
routing: {
@@ -349,7 +408,7 @@ const sharedOperations: INodeProperties[] = [
{
displayName: 'Include Usage',
name: 'includeUsage',
description: 'If set, an additional chunk will be streamed before the data: [DONE] message',
description: 'Whether to include an additional chunk before the data: [DONE] message',
type: 'boolean',
default: false,
},
@@ -363,133 +422,71 @@ const sharedOperations: INodeProperties[] = [
},
},
{
displayName: 'Stop Sequences',
name: 'stop',
description: 'Up to 4 sequences where the API will stop generating further tokens',
displayName: 'Temperature',
name: 'temperature',
default: 0.7,
typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 2 },
description:
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
type: 'number',
routing: {
send: {
type: 'body',
property: 'temperature',
},
},
},
{
displayName: 'Tool Choice',
name: 'toolChoice',
description: 'Controls which (if any) tool is called by the model',
type: 'options',
options: [
{
name: 'Auto',
value: 'auto',
description: 'The model can pick between generating a message or calling one or more tools',
},
{
name: 'None',
value: 'none',
description: 'The model will not call any tool and instead generates a message',
},
{
name: 'Required',
value: 'required',
description: 'The model must call one or more tools',
},
{
name: 'Function',
value: 'function',
description: 'Specifies a particular tool via {"type": "function", "function": {"name": "my_function"}}',
},
],
default: 'auto',
routing: {
send: {
type: 'body',
property: 'tool_choice',
},
},
},
{
displayName: 'Tool Choice Function Name',
name: 'toolChoiceFunctionName',
description: 'The name of the function to call when tool choice is set to function',
type: 'string',
default: '',
placeholder: 'e.g. \\n, Human:, AI:',
routing: {
send: {
type: 'body',
property: 'stop',
value: '={{$parameter.stop ? $parameter.stop.split(",").map(s => s.trim()) : undefined}}',
},
},
},
{
displayName: 'Presence Penalty',
name: 'presencePenalty',
description:
'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far',
type: 'number',
default: undefined,
typeOptions: {
maxValue: 2,
minValue: -2,
numberPrecision: 2,
},
routing: {
send: {
type: 'body',
property: 'presence_penalty',
},
},
},
{
displayName: 'Frequency Penalty',
name: 'frequencyPenalty',
description:
'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far',
type: 'number',
default: undefined,
typeOptions: {
maxValue: 2,
minValue: -2,
numberPrecision: 2,
},
routing: {
send: {
type: 'body',
property: 'frequency_penalty',
},
},
},
{
displayName: 'Logprobs',
name: 'logprobs',
description: 'Whether to return log probabilities of the output tokens',
type: 'boolean',
default: false,
routing: {
send: {
type: 'body',
property: 'logprobs',
},
},
},
{
displayName: 'Top Logprobs',
name: 'topLogprobs',
description: 'An integer between 0 and 20 specifying the number of most likely tokens to return at each token position',
type: 'number',
default: undefined,
displayOptions: {
show: {
logprobs: [true],
toolChoice: ['function'],
},
},
typeOptions: {
minValue: 0,
maxValue: 20,
},
routing: {
send: {
type: 'body',
property: 'top_logprobs',
},
},
},
{
displayName: 'User Identifier',
name: 'user',
description: 'A unique identifier representing your end-user, which can help monitor and detect abuse',
type: 'string',
default: '',
routing: {
send: {
type: 'body',
property: 'user',
},
},
},
{
displayName: 'Logit Bias',
name: 'logitBias',
description: 'Modify the likelihood of specified tokens appearing in the completion (JSON object mapping token IDs to bias values)',
type: 'string',
default: '',
placeholder: '{"50256": -100}',
routing: {
send: {
type: 'body',
property: 'logit_bias',
value: '={{$parameter.logitBias ? JSON.parse($parameter.logitBias) : undefined}}',
},
},
},
{
displayName: 'Metadata',
name: 'metadata',
description: 'Developer-defined metadata to attach to the completion (JSON object)',
type: 'string',
default: '',
placeholder: '{"purpose": "testing"}',
routing: {
send: {
type: 'body',
property: 'metadata',
value: '={{$parameter.metadata ? JSON.parse($parameter.metadata) : undefined}}',
property: 'tool_choice',
value: '={{"type": "function", "function": {"name": $parameter.toolChoiceFunctionName}}}',
},
},
},
@@ -553,56 +550,55 @@ const sharedOperations: INodeProperties[] = [
},
},
{
displayName: 'Tool Choice',
name: 'toolChoice',
description: 'Controls which (if any) tool is called by the model',
type: 'options',
options: [
{
name: 'Auto',
value: 'auto',
description: 'The model can pick between generating a message or calling one or more tools',
displayName: 'Top Logprobs',
name: 'topLogprobs',
description: 'An integer between 0 and 20 specifying the number of most likely tokens to return at each token position',
type: 'number',
default: undefined,
displayOptions: {
show: {
logprobs: [true],
},
{
name: 'None',
value: 'none',
description: 'The model will not call any tool and instead generates a message',
},
{
name: 'Required',
value: 'required',
description: 'The model must call one or more tools',
},
{
name: 'Function',
value: 'function',
description: 'Specifies a particular tool via {"type": "function", "function": {"name": "my_function"}}',
},
],
default: 'auto',
},
typeOptions: {
minValue: 0,
maxValue: 20,
},
routing: {
send: {
type: 'body',
property: 'tool_choice',
property: 'top_logprobs',
},
},
},
{
displayName: 'Tool Choice Function Name',
name: 'toolChoiceFunctionName',
description: 'The name of the function to call when tool choice is set to function',
type: 'string',
default: '',
displayOptions: {
show: {
toolChoice: ['function'],
},
displayName: 'Top P',
name: 'topP',
description: 'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass',
type: 'number',
default: undefined,
typeOptions: {
maxValue: 1,
minValue: 0,
numberPrecision: 3,
},
routing: {
send: {
type: 'body',
property: 'tool_choice',
value: '={{"type": "function", "function": {"name": $parameter.toolChoiceFunctionName}}}',
property: 'top_p',
},
},
},
{
displayName: 'User Identifier',
name: 'user',
description: 'A unique identifier representing your end-user, which can help monitor and detect abuse',
type: 'string',
default: '',
routing: {
send: {
type: 'body',
property: 'user',
},
},
},
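
Two of the routing expressions in this file do light reshaping before the request goes out: the comma-separated Stop Sequences string becomes an array, and when Tool Choice is set to Function, the function name is wrapped in the object form the API expects. A minimal sketch of both in plain TypeScript (expression logic only; the example values are illustrative):

```typescript
// Example field values (illustrative only).
const stopInput = '\\n, Human:, AI:';         // "Stop Sequences"
const toolChoiceFunctionName = 'my_function'; // "Tool Choice Function Name"

const bodyFragment = {
	// Mirrors: '={{$parameter.stop ? $parameter.stop.split(",").map(s => s.trim()) : undefined}}'
	stop: stopInput ? stopInput.split(',').map((s) => s.trim()) : undefined,
	// Mirrors: '={{"type": "function", "function": {"name": $parameter.toolChoiceFunctionName}}}'
	tool_choice: { type: 'function', function: { name: toolChoiceFunctionName } },
};

console.log(bodyFragment);
// { stop: [ '\\n', 'Human:', 'AI:' ],
//   tool_choice: { type: 'function', function: { name: 'my_function' } } }
```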

View file

@@ -1,5 +1,5 @@
{
"name": "n8n-node-digitalocean-gradient-serverless-inference",
"name": "@digitalocean/n8n-nodes-digitalocean-gradient-serverless-inference",
"version": "1.0.0",
"description": "This is an n8n community node for the DigitalOcean Gradient™ AI Platform Serverless Inference API",
"keywords": [
@@ -13,7 +13,7 @@
},
"repository": {
"type": "git",
"url": "https://github.com/digitalocean-labs/n8n-node-gradient-serverless-inference.git"
"url": "git+https://github.com/digitalocean-labs/n8n-node-gradient-serverless-inference.git"
},
"engines": {
"node": ">=20.15"
@@ -49,5 +49,325 @@
},
"peerDependencies": {
"n8n-workflow": "*"
},
"dependencies": {
"acorn": "^8.15.0",
"acorn-jsx": "^5.3.2",
"ajv": "^6.12.6",
"ansi-regex": "^5.0.1",
"ansi-styles": "^4.3.0",
"anymatch": "^3.1.3",
"argparse": "^2.0.1",
"array-each": "^1.0.1",
"array-slice": "^1.1.0",
"array-union": "^2.1.0",
"assert": "^2.1.0",
"ast-types": "^0.15.2",
"async-done": "^2.0.0",
"async-settle": "^2.0.0",
"asynckit": "^0.4.0",
"available-typed-arrays": "^1.0.7",
"axios": "^1.8.2",
"b4a": "^1.6.7",
"bach": "^2.0.1",
"balanced-match": "^1.0.2",
"bare-events": "^2.6.1",
"base64-js": "^1.5.1",
"binary-extensions": "^2.3.0",
"bl": "^5.1.0",
"brace-expansion": "^2.0.2",
"braces": "^3.0.3",
"buffer": "^6.0.3",
"call-bind": "^1.0.8",
"call-bind-apply-helpers": "^1.0.2",
"call-bound": "^1.0.4",
"callsites": "^3.1.0",
"camel-case": "^4.1.2",
"chalk": "^4.1.2",
"charenc": "^0.0.2",
"chokidar": "^3.6.0",
"cliui": "^7.0.4",
"clone": "^2.1.2",
"color-convert": "^2.0.1",
"color-name": "^1.1.4",
"combined-stream": "^1.0.8",
"concat-map": "^0.0.1",
"convert-source-map": "^2.0.0",
"copy-props": "^4.0.0",
"cross-spawn": "^7.0.6",
"crypt": "^0.0.2",
"debug": "^4.4.1",
"deep-equal": "^2.2.0",
"deep-is": "^0.1.4",
"define-data-property": "^1.1.4",
"define-properties": "^1.2.1",
"delayed-stream": "^1.0.0",
"detect-file": "^1.0.0",
"dir-glob": "^3.0.1",
"doctrine": "^3.0.0",
"dunder-proto": "^1.0.1",
"each-props": "^3.0.0",
"emoji-regex": "^8.0.0",
"end-of-stream": "^1.4.5",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-get-iterator": "^1.1.3",
"es-object-atoms": "^1.1.1",
"escalade": "^3.2.0",
"escape-string-regexp": "^4.0.0",
"eslint-config-riot": "^1.0.0",
"eslint-plugin-local": "^1.0.0",
"eslint-scope": "^7.2.2",
"eslint-visitor-keys": "^4.2.1",
"espree": "^9.6.1",
"esprima": "^4.0.1",
"esprima-next": "^5.8.4",
"esquery": "^1.6.0",
"esrecurse": "^4.3.0",
"estraverse": "^5.3.0",
"esutils": "^2.0.3",
"expand-tilde": "^2.0.2",
"extend": "^3.0.2",
"fast-deep-equal": "^3.1.3",
"fast-fifo": "^1.3.2",
"fast-glob": "^3.3.3",
"fast-json-stable-stringify": "^2.1.0",
"fast-levenshtein": "^2.0.6",
"fastest-levenshtein": "^1.0.16",
"fastq": "^1.19.1",
"file-entry-cache": "^6.0.1",
"fill-range": "^7.1.1",
"find-up": "^5.0.0",
"findup-sync": "^5.0.0",
"fined": "^2.0.0",
"flagged-respawn": "^2.0.0",
"flat-cache": "^3.2.0",
"flatted": "^3.3.3",
"follow-redirects": "^1.15.11",
"for-each": "^0.3.5",
"for-in": "^1.0.2",
"for-own": "^1.0.0",
"form-data": "^4.0.0",
"fs-mkdirp-stream": "^2.0.1",
"fs.realpath": "^1.0.0",
"function-bind": "^1.1.2",
"functions-have-names": "^1.2.3",
"get-caller-file": "^2.0.5",
"get-intrinsic": "^1.3.0",
"get-proto": "^1.0.1",
"glob": "^7.2.3",
"glob-parent": "^6.0.2",
"glob-stream": "^8.0.3",
"glob-watcher": "^6.0.0",
"global-modules": "^1.0.0",
"global-prefix": "^1.0.2",
"globals": "^13.24.0",
"globby": "^11.1.0",
"glogg": "^2.2.0",
"gopd": "^1.2.0",
"graceful-fs": "^4.2.11",
"graphemer": "^1.4.0",
"gulp-cli": "^3.1.0",
"gulplog": "^2.2.0",
"has-bigints": "^1.1.0",
"has-flag": "^4.0.0",
"has-property-descriptors": "^1.0.2",
"has-symbols": "^1.1.0",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2",
"homedir-polyfill": "^1.0.3",
"iconv-lite": "^0.6.3",
"ieee754": "^1.2.1",
"ignore": "^5.3.2",
"import-fresh": "^3.3.1",
"imurmurhash": "^0.1.4",
"indefinite": "^2.5.2",
"inflight": "^1.0.6",
"inherits": "^2.0.4",
"ini": "^1.3.8",
"internal-slot": "^1.1.0",
"interpret": "^3.1.1",
"is-absolute": "^1.0.0",
"is-arguments": "^1.2.0",
"is-array-buffer": "^3.0.5",
"is-bigint": "^1.1.0",
"is-binary-path": "^2.1.0",
"is-boolean-object": "^1.2.2",
"is-buffer": "^1.1.6",
"is-callable": "^1.2.7",
"is-core-module": "^2.16.1",
"is-date-object": "^1.1.0",
"is-extglob": "^2.1.1",
"is-fullwidth-code-point": "^3.0.0",
"is-generator-function": "^1.1.0",
"is-glob": "^4.0.3",
"is-map": "^2.0.3",
"is-nan": "^1.3.2",
"is-negated-glob": "^1.0.0",
"is-number": "^7.0.0",
"is-number-object": "^1.1.1",
"is-path-inside": "^3.0.3",
"is-plain-object": "^5.0.0",
"is-regex": "^1.2.1",
"is-relative": "^1.0.0",
"is-set": "^2.0.3",
"is-shared-array-buffer": "^1.0.4",
"is-string": "^1.1.1",
"is-symbol": "^1.1.1",
"is-typed-array": "^1.1.15",
"is-unc-path": "^1.0.0",
"is-valid-glob": "^1.0.0",
"is-weakmap": "^2.0.2",
"is-weakset": "^2.0.4",
"is-windows": "^1.0.2",
"isarray": "^2.0.5",
"isexe": "^2.0.0",
"isobject": "^3.0.1",
"jmespath": "^0.16.0",
"js-base64": "^3.7.2",
"js-yaml": "^4.1.0",
"json-buffer": "^3.0.1",
"json-schema-traverse": "^0.4.1",
"json-stable-stringify-without-jsonify": "^1.0.1",
"jssha": "^3.3.1",
"keyv": "^4.5.4",
"last-run": "^2.0.0",
"lead": "^4.0.0",
"levn": "^0.4.1",
"liftoff": "^5.0.1",
"locate-path": "^6.0.0",
"lodash": "^4.17.21",
"lodash.merge": "^4.6.2",
"lower-case": "^2.0.2",
"luxon": "^3.4.4",
"map-cache": "^0.2.2",
"math-intrinsics": "^1.1.0",
"md5": "^2.3.0",
"merge2": "^1.4.1",
"micromatch": "^4.0.8",
"mime-db": "^1.52.0",
"mime-types": "^2.1.35",
"minimatch": "^9.0.5",
"ms": "^2.1.3",
"mute-stdout": "^2.0.0",
"n8n-workflow": "^1.82.0",
"natural-compare": "^1.4.0",
"no-case": "^3.0.4",
"normalize-path": "^3.0.0",
"now-and-later": "^3.0.0",
"object-inspect": "^1.13.4",
"object-is": "^1.1.6",
"object-keys": "^1.1.1",
"object.assign": "^4.1.7",
"object.defaults": "^1.1.0",
"object.pick": "^1.3.0",
"once": "^1.4.0",
"optionator": "^0.9.4",
"p-limit": "^3.1.0",
"p-locate": "^5.0.0",
"parent-module": "^1.0.1",
"parse-filepath": "^1.0.2",
"parse-passwd": "^1.0.0",
"pascal-case": "^3.1.2",
"path-exists": "^4.0.0",
"path-is-absolute": "^1.0.1",
"path-key": "^3.1.1",
"path-parse": "^1.0.7",
"path-root": "^0.1.1",
"path-root-regex": "^0.1.2",
"path-type": "^4.0.0",
"picomatch": "^2.3.1",
"pluralize": "^8.0.0",
"possible-typed-array-names": "^1.1.0",
"prelude-ls": "^1.2.1",
"proxy-from-env": "^1.1.0",
"punycode": "^2.3.1",
"queue-microtask": "^1.2.3",
"readable-stream": "^3.6.2",
"readdirp": "^3.6.0",
"recast": "^0.21.5",
"rechoir": "^0.8.0",
"regexp.prototype.flags": "^1.5.4",
"remove-trailing-separator": "^1.1.0",
"replace-ext": "^2.0.0",
"replace-homedir": "^2.0.0",
"require-directory": "^2.1.1",
"resolve": "^1.22.10",
"resolve-dir": "^1.0.1",
"resolve-from": "^4.0.0",
"resolve-options": "^2.0.0",
"reusify": "^1.1.0",
"rimraf": "^3.0.2",
"run-parallel": "^1.2.0",
"safe-buffer": "^5.2.1",
"safe-regex-test": "^1.1.0",
"safer-buffer": "^2.1.2",
"sax": "^1.4.1",
"semver": "^7.7.2",
"semver-greatest-satisfied-range": "^2.0.0",
"sentence-case": "^3.0.4",
"set-function-length": "^1.2.2",
"set-function-name": "^2.0.2",
"shebang-command": "^2.0.0",
"shebang-regex": "^3.0.0",
"side-channel": "^1.1.0",
"side-channel-list": "^1.0.0",
"side-channel-map": "^1.0.1",
"side-channel-weakmap": "^1.0.2",
"slash": "^3.0.0",
"source-map": "^0.6.1",
"sparkles": "^2.1.0",
"stop-iteration-iterator": "^1.1.0",
"stream-composer": "^1.0.2",
"stream-exhaust": "^1.0.2",
"streamx": "^2.22.1",
"string-width": "^4.2.3",
"string_decoder": "^1.3.0",
"strip-ansi": "^6.0.1",
"strip-json-comments": "^3.1.1",
"supports-color": "^7.2.0",
"supports-preserve-symlinks-flag": "^1.0.0",
"sver": "^1.8.4",
"teex": "^1.0.1",
"text-decoder": "^1.2.3",
"text-table": "^0.2.0",
"title-case": "^3.0.3",
"to-regex-range": "^5.0.1",
"to-through": "^3.0.0",
"transliteration": "^2.3.5",
"ts-api-utils": "^2.1.0",
"tslib": "^2.8.1",
"type-check": "^0.4.0",
"type-fest": "^0.20.2",
"unc-path-regex": "^0.1.2",
"undertaker": "^2.0.0",
"undertaker-registry": "^2.0.0",
"upper-case-first": "^2.0.2",
"uri-js": "^4.4.1",
"util": "^0.12.5",
"util-deprecate": "^1.0.2",
"v8flags": "^4.0.1",
"value-or-function": "^4.0.0",
"vinyl": "^3.0.1",
"vinyl-contents": "^2.0.0",
"vinyl-fs": "^4.0.2",
"vinyl-sourcemap": "^2.0.0",
"which": "^2.0.2",
"which-boxed-primitive": "^1.1.1",
"which-collection": "^1.0.2",
"which-typed-array": "^1.1.19",
"word-wrap": "^1.2.5",
"wrap-ansi": "^7.0.0",
"wrappy": "^1.0.2",
"xml2js": "^0.6.2",
"xmlbuilder": "^11.0.1",
"y18n": "^5.0.8",
"yargs": "^16.2.0",
"yargs-parser": "^20.2.9",
"yocto-queue": "^0.1.0",
"zod": "^3.24.1"
},
"bugs": {
"url": "https://github.com/digitalocean-labs/n8n-node-gradient-serverless-inference/issues"
}
}
}