Add trailing commas and update dependencies
Boris Verkhovskiy 2024-01-13 17:29:29 -08:00
parent f26555a32e
commit 3faec3a125
62 changed files with 1271 additions and 892 deletions


@ -1 +1 @@
{ "trailingComma": "es5" }
{}
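The Prettier configuration change above drops the explicit "es5" override, so the repository falls back to Prettier 3's default trailingComma setting ("all"), which also puts a comma after the last item of multi-line parameter and argument lists. A minimal, illustrative TypeScript sketch (not taken from the repository) of the mechanical churn repeated through the *.ts hunks below:

// Under { "trailingComma": "es5" }, Prettier added trailing commas only where
// ES5 allowed them (array and object literals), leaving the last parameter and
// the last call argument of a multi-line list bare.
// With the override removed, Prettier 3's default ("all") adds them there too:
function joinPair(
  first: string,
  second: string, // <- newly added trailing comma after the final parameter
): string {
  return [first, second].join(
    ", ", // <- and after the final call argument
  );
}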

package-lock.json (generated): 1248 changes; file diff suppressed because it is too large.


@ -26,34 +26,34 @@
"dependencies": {
"@curlconverter/tree-sitter-bash": "^0.0.3",
"jsesc": "^3.0.2",
"lossless-json": "^2.0.11",
"tree-sitter": "^0.20.5",
"lossless-json": "^4.0.1",
"tree-sitter": "^0.20.6",
"web-tree-sitter": "^0.20.8",
"yamljs": "^0.3.0"
},
"devDependencies": {
"@types/diff": "^5.0.7",
"@types/diff": "^5.0.9",
"@types/glob": "^8.1.0",
"@types/har-format": "^1.2.14",
"@types/jsesc": "^3.0.2",
"@types/nunjucks": "^3.2.5",
"@types/tape": "^5.6.3",
"@types/yamljs": "^0.2.33",
"@types/yargs": "^17.0.29",
"@typescript-eslint/eslint-plugin": "^6.9.0",
"@typescript-eslint/parser": "^6.9.0",
"c8": "^8.0.1",
"@types/har-format": "^1.2.15",
"@types/jsesc": "^3.0.3",
"@types/nunjucks": "^3.2.6",
"@types/tape": "^5.6.4",
"@types/yamljs": "^0.2.34",
"@types/yargs": "^17.0.32",
"@typescript-eslint/eslint-plugin": "^6.18.1",
"@typescript-eslint/parser": "^6.18.1",
"c8": "^9.1.0",
"colors": "^1.4.0",
"diff": "^5.1.0",
"eslint": "^8.52.0",
"eslint-config-prettier": "^9.0.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"glob": "^10.3.10",
"husky": "^8.0.3",
"lint-staged": "^15.0.2",
"prettier": "^3.0.3",
"tape": "^5.7.2",
"lint-staged": "^15.2.0",
"prettier": "^3.2.1",
"tape": "^5.7.3",
"tree-sitter-cli": "^0.20.8",
"typescript": "^5.2.2",
"typescript": "^5.3.3",
"yargs": "^17.7.2"
},
"scripts": {


@ -47,10 +47,10 @@ const COMMA_SEPARATED = new Set(
"Upgrade",
"Via",
"Warning",
].map((h) => h.toLowerCase())
].map((h) => h.toLowerCase()),
);
const SEMICOLON_SEPARATED = new Set(
["Content-Type", "Cookie", "Prefer"].map((h) => h.toLowerCase())
["Content-Type", "Cookie", "Prefer"].map((h) => h.toLowerCase()),
);
export class Headers implements Iterable<[Word, Word | null]> {
@ -152,7 +152,7 @@ export class Headers implements Iterable<[Word, Word | null]> {
if (mergeChar) {
const merged = joinWords(
nonEmptyHeaders.map((h) => h[1]) as Word[],
mergeChar
mergeChar,
);
warnings.push([
"repeated-header",


@ -171,7 +171,7 @@ function buildURL(
uploadFile?: Word,
outputFile?: Word,
stdin?: Word,
stdinFile?: Word
stdinFile?: Word,
): RequestUrl {
const originalUrl = url;
const u = parseurl(global, config, url);
@ -200,7 +200,7 @@ function buildURL(
u.host,
u.path,
u.query,
u.fragment
u.fragment,
);
// curl example.com example.com?foo=bar --url-query isshared=t
@ -252,7 +252,7 @@ function buildURL(
[queryArray, queryStr, queryStrReadsFile] = buildData(
queryParts,
stdin,
stdinFile
stdinFile,
);
urlQueryArray = queryArray;
}
@ -261,7 +261,7 @@ function buildURL(
[queryArray, queryStr, queryStrReadsFile] = buildData(
queryParts,
stdin,
stdinFile
stdinFile,
);
}
@ -281,13 +281,13 @@ function buildURL(
"://",
u.host,
u.path,
u.fragment
u.fragment,
);
url = mergeWords(u.scheme, "://", u.host, u.path, u.query, u.fragment);
let urlWithoutQueryList = url;
// TODO: parseQueryString() doesn't accept leading '?'
let [queryList, queryDict] = parseQueryString(
u.query.toBool() ? u.query.slice(1) : new Word()
u.query.toBool() ? u.query.slice(1) : new Word(),
);
if (queryList && queryList.length) {
// TODO: remove the fragment too?
@ -296,7 +296,7 @@ function buildURL(
"://",
u.host,
u.path,
u.fragment
u.fragment,
);
} else {
queryList = null;
@ -399,7 +399,7 @@ function buildURL(
function buildData(
configData: SrcDataParam[],
stdin?: Word,
stdinFile?: Word
stdinFile?: Word,
): [DataParam[], Word, string | null] {
const data: DataParam[] = [];
let dataStrState = new Word();
@ -448,7 +448,7 @@ function buildData(
case "urlencode":
value = mergeWords(
name && name.length ? name.append("=") : new Word(),
percentEncodePlus(stdin)
percentEncodePlus(stdin),
);
break;
default:
@ -498,7 +498,7 @@ function buildData(
return d.filename.prepend("@");
}
return d;
})
}),
);
return [data, dataStr, dataStrReadsFile];
@ -508,7 +508,7 @@ function buildRequest(
global: GlobalConfig,
config: OperationConfig,
stdin?: Word,
stdinFile?: Word
stdinFile?: Word,
): Request {
if (!config.url || !config.url.length) {
// TODO: better error message (could be parsing fail)
@ -600,7 +600,7 @@ function buildRequest(
[data, dataStr, dataStrReadsFile] = buildData(
config.data,
stdin,
stdinFile
stdinFile,
);
}
}
@ -620,8 +620,8 @@ function buildRequest(
uploadFiles[i],
outputFiles[i],
stdin,
stdinFile
)
stdinFile,
),
);
}
// --get moves --data into the URL's query string
@ -716,7 +716,7 @@ function buildRequest(
// TODO: remove these
request.isDataRaw = false;
request.isDataBinary = (data || []).some(
(d) => !(d instanceof Word) && d.filetype === "binary"
(d) => !(d instanceof Word) && d.filetype === "binary",
);
}
if (queryArray) {
@ -916,21 +916,21 @@ function buildRequest(
export function buildRequests(
global: GlobalConfig,
stdin?: Word,
stdinFile?: Word
stdinFile?: Word,
): Request[] {
if (!global.configs.length) {
// shouldn't happen
warnf(global, ["no-configs", "got empty config object"]);
}
return global.configs.map((config) =>
buildRequest(global, config, stdin, stdinFile)
buildRequest(global, config, stdin, stdinFile),
);
}
export function getFirst(
requests: Request[],
warnings: Warnings,
support?: Support
support?: Support,
): Request {
if (requests.length > 1) {
warnings.push([


@ -10,7 +10,7 @@ export function warnf(global: GlobalConfig, warning: [string, string]) {
export function underlineNode(
node: Parser.SyntaxNode,
curlCommand?: string
curlCommand?: string,
): string {
// doesn't include leading whitespace
const command = node.tree.rootNode;
@ -49,7 +49,7 @@ export function underlineNode(
export function underlineNodeEnd(
node: Parser.SyntaxNode,
curlCommand?: string
curlCommand?: string,
): string {
// doesn't include leading whitespace
const command = node.tree.rootNode;
@ -95,7 +95,7 @@ export interface Support {
export function warnIfPartsIgnored(
request: Request,
warnings: Warnings,
support?: Support
support?: Support,
) {
if (request.urls.length > 1 && !support?.multipleUrls) {
warnings.push([


@ -295,7 +295,7 @@ try {
// TODO: warn about unsupported arguments once we know
// which language we're converting to
undefined,
warnings
warnings,
);
} catch (e) {
exitWithError(e);
@ -319,9 +319,9 @@ if (!has(translate, language)) {
JSON.stringify(language) +
"\n" +
"must be one of: " +
Object.keys(translate).sort().join(", ")
Object.keys(translate).sort().join(", "),
),
verbose
verbose,
);
}
@ -357,9 +357,9 @@ if (commandFromStdin) {
// they aren't stuck with what looks like a hung terminal.
exitWithError(
new CCError(
"if you pass --stdin or -, you can't also pass " + extraArgsStr
"if you pass --stdin or -, you can't also pass " + extraArgsStr,
),
verbose
verbose,
);
}
const input = fs.readFileSync(0, "utf8");


@ -34,7 +34,7 @@ function parseDetails(
p: Word,
ptr: number,
supported: Supported,
warnings: Warnings
warnings: Warnings,
): FormParamPrototype {
while (ptr < p.length && p.charAt(ptr) === ";") {
ptr += 1;
@ -114,7 +114,7 @@ function isSpace(c: Token): boolean {
function getParamWord(
p: Word,
start: number,
warnings: Warnings
warnings: Warnings,
): [Word, number] {
let ptr = start;
if (p.charAt(ptr) === '"') {
@ -163,7 +163,7 @@ function getParamPart(
p: Word,
ptr: number,
supported: Supported,
warnings: Warnings
warnings: Warnings,
): FormParamPrototype {
while (ptr < p.length && isSpace(p.charAt(ptr))) {
ptr += 1;
@ -179,7 +179,7 @@ function getParamPart(
// -F is a complicated option to parse.
export function parseForm(
form: SrcFormParam[],
warnings: Warnings
warnings: Warnings,
): FormParam[] {
const multipartUploads = [];
let depth = 0;
@ -189,7 +189,7 @@ export function parseForm(
if (!multipartArgument.value.includes("=")) {
throw new CCError(
'invalid value for --form/-F, missing "=": ' +
JSON.stringify(multipartArgument.value.toString())
JSON.stringify(multipartArgument.value.toString()),
);
}
const [name, value] = multipartArgument.value.split("=", 2);
@ -207,14 +207,14 @@ export function parseForm(
{
headers: true,
},
warnings
warnings,
);
} else if (!isString && name.length === 0 && eq(value, ")")) {
depth -= 1;
if (depth < 0) {
throw new CCError(
"no multipart to terminate: " +
JSON.stringify(multipartArgument.value.toString())
JSON.stringify(multipartArgument.value.toString()),
);
}
} else if (!isString && value.charAt(0) === "@") {
@ -228,7 +228,7 @@ export function parseForm(
encoder: true,
headers: true,
},
warnings
warnings,
);
formParam.contentFile = formParam.content;
@ -251,7 +251,7 @@ export function parseForm(
encoder: true,
headers: true,
},
warnings
warnings,
);
formParam.contentFile = formParam.content;
delete formParam.content;
@ -274,7 +274,7 @@ export function parseForm(
encoder: true,
headers: true,
},
warnings
warnings,
);
}
}


@ -472,7 +472,7 @@ for (const [opt, val] of Object.entries(curlLongOpts)) {
if (
!Object.prototype.hasOwnProperty.call(
curlLongOptsShortened,
shortenedOpt
shortenedOpt,
)
) {
if (!Object.prototype.hasOwnProperty.call(curlLongOpts, shortenedOpt)) {
@ -966,7 +966,7 @@ function checkSupported(
global: GlobalConfig,
lookup: string,
longArg: LongShort,
supportedOpts?: Set<string>
supportedOpts?: Set<string>,
) {
if (supportedOpts && !supportedOpts.has(longArg.name)) {
// TODO: better message. include generator name?
@ -982,7 +982,7 @@ function checkSupported(
export function pushProp<Type>(
obj: { [key: string]: Type[] },
prop: string,
value: Type
value: Type,
) {
if (!Object.prototype.hasOwnProperty.call(obj, prop)) {
obj[prop] = [];
@ -995,7 +995,7 @@ function pushArgValue(
global: GlobalConfig,
config: OperationConfig,
argName: string,
value: Word
value: Word,
) {
// Note: cli.ts assumes that the property names on OperationConfig
// are the same as the passed in argument in an error message, so
@ -1075,7 +1075,7 @@ function setArgValue(
global: GlobalConfig,
config: OperationConfig,
argName: string,
toggle: boolean
toggle: boolean,
): OperationConfig {
switch (argName) {
case "digest":
@ -1172,7 +1172,7 @@ export function parseArgs(
shortenedLongOpts: LongOpts = curlLongOptsShortened,
shortOpts: ShortOpts = curlShortOpts,
supportedOpts?: Set<string>,
warnings: Warnings = []
warnings: Warnings = [],
): GlobalConfig {
let config: OperationConfig = { authtype: CURLAUTH_BASIC };
const global: GlobalConfig = { configs: [config], warnings };
@ -1195,7 +1195,7 @@ export function parseArgs(
" could " +
(shellToken.type === "command" ? "return" : "be") +
" anything\n" +
underlineNode(shellToken.syntaxNode)
underlineNode(shellToken.syntaxNode),
);
}
const argStr = arg.toString();
@ -1225,7 +1225,7 @@ export function parseArgs(
global,
config,
longArg.name,
toBoolean(argStr.slice(2))
toBoolean(argStr.slice(2)),
); // TODO: all shortened args work correctly?
}
@ -1253,7 +1253,7 @@ export function parseArgs(
global,
config,
longArg.name,
toBoolean(shortFor)
toBoolean(shortFor),
);
} else {
throw new CCError("option -: is unknown");
@ -1270,13 +1270,13 @@ export function parseArgs(
" could " +
(jthChar.type === "command" ? "return" : "be") +
" anything\n" +
underlineNode(jthChar.syntaxNode)
underlineNode(jthChar.syntaxNode),
);
}
if (!has(shortOpts, jthChar)) {
if (has(changedShortOpts, jthChar)) {
throw new CCError(
"option " + arg + ": " + changedShortOpts[jthChar]
"option " + arg + ": " + changedShortOpts[jthChar],
);
}
// TODO: there are a few deleted short options we could report
@ -1300,7 +1300,7 @@ export function parseArgs(
val = args[i];
} else {
throw new CCError(
"option " + arg.toString() + ": requires parameter"
"option " + arg.toString() + ": requires parameter",
);
}
pushArgValue(global, config, longArg.name, val);
@ -1311,7 +1311,7 @@ export function parseArgs(
global,
config,
longArg.name,
toBoolean(shortFor)
toBoolean(shortFor),
);
}
if (lookup) {


@ -98,7 +98,7 @@ function warnAboutGlobs(global: GlobalConfig, url: string) {
export function parseurl(
global: GlobalConfig,
config: OperationConfig,
url: Word
url: Word,
): Curl_URL {
// This is curl's parseurl()
// https://github.com/curl/curl/blob/curl-7_85_0/lib/urlapi.c#L1144


@ -105,7 +105,7 @@ type AnsibleURI = {
function getDataString(
request: Request,
warnings: Warnings
warnings: Warnings,
): [Body, BodyFormat] | [string, "src"] | undefined {
if (!request.data || !request.data.isString()) {
return;
@ -151,7 +151,7 @@ function getDataString(
Array.isArray(q[1])
? q[1].map((qv) => qv.toString())
: q[1].toString(),
])
]),
),
"form-urlencoded",
];
@ -168,7 +168,7 @@ function getDataString(
export function _toAnsible(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
// Only supported if there's one file and nothing else
const request = getFirst(requests, warnings, { dataReadsFile: true });
@ -348,12 +348,12 @@ export function _toAnsible(
},
],
100,
2
2,
);
}
export function toAnsibleWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const ansible = _toAnsible(requests, warnings);


@ -143,7 +143,7 @@ export function _toCFML(requests: Request[], warnings: Warnings = []): string {
export function toCFMLWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const cfml = _toCFML(requests, warnings);


@ -93,7 +93,7 @@ function reprQueryDict(query: QueryDict, importLines: Set<string>): string {
const key = q[0].toString();
if (!q[0].isString() || !safeAsKeyword(key)) {
throw new CCError(
"can't use query key as Clojure keyword: " + JSON.stringify(key)
"can't use query key as Clojure keyword: " + JSON.stringify(key),
);
}
return (
@ -117,7 +117,7 @@ function reprQueryList(query: QueryList, importLines: Set<string>): string {
query
.map(
(q) =>
"[" + repr(q[0], importLines) + " " + repr(q[1], importLines) + "]"
"[" + repr(q[0], importLines) + " " + repr(q[1], importLines) + "]",
)
.join("\n ") +
"]"
@ -127,7 +127,7 @@ function reprQueryList(query: QueryList, importLines: Set<string>): string {
function rerpQuery(
queryList: QueryList | null | undefined,
queryDict: QueryDict | null | undefined,
importLines: Set<string>
importLines: Set<string>,
): string | null {
if (queryDict) {
try {
@ -148,7 +148,7 @@ function reprHeaders(headers: Headers, importLines: Set<string>): string {
.map(
// TODO: convert to keywords and lowercase known headers
// TODO: :content-type is a top-level key and changes how the body is interpreted
(h) => repr(h[0], importLines) + " " + repr(h[1] as Word, importLines)
(h) => repr(h[0], importLines) + " " + repr(h[1] as Word, importLines),
);
return "{" + lines.join(",\n ") + "}";
}
@ -160,7 +160,7 @@ function indent(s: string, indent: number): string {
function reprJson(
obj: Word | Word[] | string | number | boolean | object | null,
importLines?: Set<string>
importLines?: Set<string>,
): string {
if (importLines && obj instanceof Word) {
return repr(obj, importLines);
@ -187,7 +187,7 @@ function reprJson(
const objReprs = Object.entries(obj).map(([k, v]) => {
if (!safeAsKeyword(k)) {
throw new CCError(
"can't use JSON key as Clojure keyword: " + JSON.stringify(k)
"can't use JSON key as Clojure keyword: " + JSON.stringify(k),
);
}
// TODO: indent logic is wrong?
@ -208,7 +208,7 @@ function addDataString(
params: Params,
request: Request,
data: Word,
importLines: Set<string>
importLines: Set<string>,
) {
const contentType = request.headers.getContentType();
const exactContentType = request.headers.get("content-type");
@ -260,7 +260,7 @@ function addData(
params: Params,
request: Request,
data: DataParam[],
importLines: Set<string>
importLines: Set<string>,
) {
if (data.length === 1 && data[0] instanceof Word && data[0].isString()) {
try {
@ -283,7 +283,7 @@ function addData(
parts.push("(slurp *in*)");
} else {
parts.push(
"(clojure.java.io/file " + repr(filename, importLines) + ")"
"(clojure.java.io/file " + repr(filename, importLines) + ")",
);
}
}
@ -300,7 +300,7 @@ function addData(
function reprMultipart(
form: FormParam[],
importLines: Set<string>,
warnings: Warnings
warnings: Warnings,
): string {
const parts = [];
for (const f of form) {
@ -330,7 +330,7 @@ function reprMultipart(
type Params = { [key: string]: string };
export function _toClojure(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings, { dataReadsFile: true });
@ -370,7 +370,7 @@ export function _toClojure(
const queryParams = rerpQuery(
request.urls[0].queryList,
request.urls[0].queryDict,
importLines
importLines,
);
if (queryParams) {
@ -416,7 +416,7 @@ export function _toClojure(
params["multipart"] = reprMultipart(
request.multipartUploads,
importLines,
warnings
warnings,
);
// TODO: above warning probably also applies here
}
@ -466,7 +466,7 @@ export function _toClojure(
if (request.connectTimeout) {
params["connection-timeout"] = times1000(
request.connectTimeout,
importLines
importLines,
);
}
@ -510,7 +510,7 @@ export function _toClojure(
export function toClojureWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const clojure = _toClojure(requests, warnings);


@ -92,7 +92,7 @@ export function repr(w: Word, imports: Set<string>): string {
export function _toCSharp(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -194,10 +194,10 @@ export function _toCSharp(
"last-modified": "LastModified",
};
const reqHeaders = request.headers.headers.filter(
(h) => !Object.keys(contentHeaders).includes(h[0].toLowerCase().toString())
(h) => !Object.keys(contentHeaders).includes(h[0].toLowerCase().toString()),
);
const reqContentHeaders = request.headers.headers.filter((h) =>
Object.keys(contentHeaders).includes(h[0].toLowerCase().toString())
Object.keys(contentHeaders).includes(h[0].toLowerCase().toString()),
);
if (
@ -372,7 +372,7 @@ export function _toCSharp(
}
export function toCSharpWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const cSharp = _toCSharp(requests, warnings);


@ -34,7 +34,9 @@ function repr(value: Word, imports: Set<string>): string {
imports.add("dart:io");
} else {
ret.push(
"(await Process.run(" + reprStr(t.value) + ", runInShell: true)).stdout"
"(await Process.run(" +
reprStr(t.value) +
", runInShell: true)).stdout",
);
imports.add("dart:io");
}
@ -281,7 +283,7 @@ export function _toDart(requests: Request[], warnings: Warnings = []): string {
export function toDartWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const dart = _toDart(requests, warnings);


@ -87,7 +87,7 @@ function getCookies(request: Request): string {
// TODO: this duplicates work, just get it from request.headers
const cookies = joinWords(
request.cookies.map((c) => joinWords(c, "=")),
"; "
"; ",
);
return `cookie: [${repr(cookies)}]`;
}
@ -112,7 +112,7 @@ function getOptions(request: Request, params: string): [string, string] {
let hackneyOptionsString = "";
if (hackneyOptions.length > 1) {
hackneyOptionsString = `hackney: [\n ${hackneyOptions.join(
",\n "
",\n ",
)}\n ]`;
} else if (hackneyOptions.length) {
hackneyOptionsString = `hackney: [${hackneyOptions[0]}]`;
@ -165,7 +165,7 @@ function getHeadersDict(request: Request): string {
const dictLines: string[] = [];
for (const [headerName, headerValue] of request.headers) {
dictLines.push(
` {${repr(headerName)}, ${repr(headerValue ?? new Word())}}`
` {${repr(headerName)}, ${repr(headerValue ?? new Word())}}`,
);
}
dict += dictLines.join(",\n");
@ -195,10 +195,10 @@ function getFormDataString(request: Request): string {
if ("contentFile" in m) {
formParams.push(
` {:file, ${repr(m.contentFile)}, {"form-data", [{:name, ${repr(
m.name
m.name,
)}}, {:filename, Path.basename(${repr(
m.filename ?? m.contentFile
)})}]}, []}`
m.filename ?? m.contentFile,
)})}]}, []}`,
);
} else {
formParams.push(` {${repr(m.name)}, ${repr(m.content)}}`);
@ -331,14 +331,14 @@ response = HTTPoison.request(request)
export function _toElixir(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
return requests.map((r) => requestToElixir(r, warnings)).join("\n");
}
export function toElixirWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const elixir = _toElixir(requests, warnings);


@ -101,11 +101,11 @@ export function _toGo(requests: Request[], warnings: Warnings = []): string {
goCode += `\tfw, err ${op} writer.CreateFormFile(${repr(
m.name,
vars,
imports
imports,
)}, filepath.Base(${repr(
m.filename ?? m.contentFile,
vars,
imports
imports,
)}))\n`;
goCode += IF_ERR;
imports.add("path/filepath");
@ -113,7 +113,7 @@ export function _toGo(requests: Request[], warnings: Warnings = []): string {
goCode += `\tfd, err ${op} os.Open(${repr(
m.contentFile,
vars,
imports
imports,
)})\n`;
goCode += IF_ERR;
imports.add("os");
@ -126,13 +126,13 @@ export function _toGo(requests: Request[], warnings: Warnings = []): string {
goCode += `\tformField, err ${op} writer.CreateFormField(${repr(
m.name,
vars,
imports
imports,
)})\n`;
goCode += IF_ERR;
goCode += `\t_, err = formField.Write([]byte(${reprMaybeBacktick(
m.content,
vars,
imports
imports,
)}))\n`;
}
goCode += "\n";
@ -258,7 +258,7 @@ export function _toGo(requests: Request[], warnings: Warnings = []): string {
}
export function toGoWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const go = _toGo(requests, warnings);


@ -49,7 +49,7 @@ function getDataString(request: Request): PostData | null {
export function _requestAndUrlToHar(
request: Request,
url: RequestUrl,
warnings: Warnings = []
warnings: Warnings = [],
): HARRequest {
const requestHar: HARRequest = {
method: url.method.toString(),
@ -57,8 +57,8 @@ export function _requestAndUrlToHar(
httpVersion: request.http3
? "HTTP/3"
: request.http2
? "HTTP/2"
: "HTTP/1.1",
? "HTTP/2"
: "HTTP/1.1",
cookies: [],
headers: [],
queryString: [],
@ -110,7 +110,7 @@ export function _requestAndUrlToHar(
export function _toHarString(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const harRequests = [];
for (const request of requests) {
@ -127,14 +127,14 @@ export function _toHarString(
},
},
null,
4
4,
) + "\n"
);
}
export function toHarStringWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
requests.map((r) => warnIfPartsIgnored(r, warnings, { multipleUrls: true }));


@ -54,7 +54,7 @@ export function _toHTTP(requests: Request[], warnings: Warnings = []): string {
if (request.authType === "basic") {
request.headers.prependIfMissing(
"Authorization",
"Basic " + btoa(user.toString() + ":" + pass.toString())
"Basic " + btoa(user.toString() + ":" + pass.toString()),
);
}
}
@ -75,7 +75,7 @@ export function _toHTTP(requests: Request[], warnings: Warnings = []): string {
let boundary =
"------------------------" +
Array.from({ length: 16 }, () =>
"0123456789abcdef".charAt(Math.floor(Math.random() * 16))
"0123456789abcdef".charAt(Math.floor(Math.random() * 16)),
).join("");
// crypto.getRandomValues() only available on Node 19+
// Array.from(crypto.getRandomValues(new Uint8Array(8)))
@ -86,14 +86,14 @@ export function _toHTTP(requests: Request[], warnings: Warnings = []): string {
// TODO: we already added Content-Type earlier but curl puts Content-Type after Content-Length
request.headers.setIfMissing(
"Content-Length",
request.data.toString().length.toString()
request.data.toString().length.toString(),
);
} else if (request.urls[0].uploadFile) {
const contentLength =
"<length of " + request.urls[0].uploadFile.toString() + ">";
const wasMissing = request.headers.setIfMissing(
"Content-Length",
contentLength
contentLength,
);
if (wasMissing) {
warnings.push([
@ -120,7 +120,7 @@ export function _toHTTP(requests: Request[], warnings: Warnings = []): string {
// TODO: could existing Content-Type have other stuff that needs to be preserved?
request.headers.set(
"Content-Type",
"multipart/form-data; boundary=" + boundary
"multipart/form-data; boundary=" + boundary,
);
}
}
@ -179,7 +179,7 @@ export function _toHTTP(requests: Request[], warnings: Warnings = []): string {
export function toHTTPWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const http = _toHTTP(requests, warnings);


@ -99,14 +99,14 @@ function escapeJsonStr(value: string): string {
throw new CCError(
"Unrepresentable JSON string: " +
JSON.stringify(value) +
' (starts with "\\=")'
' (starts with "\\=")',
);
}
if (value.startsWith("\\@")) {
throw new CCError(
"Unrepresentable JSON string: " +
JSON.stringify(value) +
' (starts with "\\@")'
' (starts with "\\@")',
);
}
if (value.startsWith("=") || value.startsWith("@")) {
@ -138,8 +138,8 @@ function toJson(obj: any, key = ""): string[] {
value,
key
? key + "[" + escapeJsonName(name) + "]"
: escapeJsonName(name, true)
)
: escapeJsonName(name, true),
),
)
.flat();
}
@ -194,7 +194,7 @@ function urlencodedAsHttpie(flags: string[], items: string[], data: Word) {
flags.push("--form");
for (const [name, value] of queryList) {
items.push(
repr(mergeWords(escapeQueryName(name), "=", escapeQueryValue(value)))
repr(mergeWords(escapeQueryName(name), "=", escapeQueryValue(value))),
);
}
}
@ -203,7 +203,7 @@ function formatData(
flags: string[],
items: string[],
data: Word,
headers: Headers
headers: Headers,
) {
const contentType = headers.getContentType();
if (contentType === "application/json" && data.isString()) {
@ -223,7 +223,7 @@ function escapeFormName(name: Word): Word {
function requestToHttpie(
request: Request,
url: RequestUrl,
warnings: Warnings
warnings: Warnings,
): string {
const flags: string[] = [];
let method: string | null = null;
@ -251,9 +251,9 @@ function requestToHttpie(
mergeWords(
escapeHeader(headerName),
":",
escapeHeaderValue(headerValue)
)
)
escapeHeaderValue(headerValue),
),
),
);
}
}
@ -297,7 +297,7 @@ function requestToHttpie(
urlArg = url.urlWithoutQueryList;
for (const [name, value] of url.queryList) {
items.push(
repr(mergeWords(escapeQueryName(name), "==", escapeQueryValue(value)))
repr(mergeWords(escapeQueryName(name), "==", escapeQueryValue(value))),
);
}
}
@ -457,7 +457,7 @@ function requestToHttpie(
export function _toHttpie(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const commands = [];
@ -494,7 +494,7 @@ export function _toHttpie(
export function toHttpieWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const httpie = _toHttpie(requests, warnings);


@ -13,7 +13,7 @@ const supportedArgs = new Set([
export function _toJavaHttpUrlConnection(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -137,7 +137,7 @@ export function _toJavaHttpUrlConnection(
}
export function toJavaHttpUrlConnectionWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const java = _toJavaHttpUrlConnection(requests, warnings);
@ -145,7 +145,7 @@ export function toJavaHttpUrlConnectionWarn(
}
export function toJavaHttpUrlConnection(
curlCommand: string | string[]
curlCommand: string | string[],
): string {
return toJavaHttpUrlConnectionWarn(curlCommand)[0];
}


@ -107,7 +107,7 @@ export function _toJava(requests: Request[], warnings: Warnings = []): string {
clientLines.push(
" .connectTimeout(Duration.ofSeconds(" +
request.connectTimeout.toString() +
"))\n"
"))\n",
);
imports.add("java.time.Duration");
}
@ -130,7 +130,7 @@ export function _toJava(requests: Request[], warnings: Warnings = []): string {
warnings.push(["upload-stdin", "uploading from stdin isn't supported"]);
}
methodCallArgs.push(
"BodyPublishers.ofFile(Paths.get(" + repr(url.uploadFile, imports) + "))"
"BodyPublishers.ofFile(Paths.get(" + repr(url.uploadFile, imports) + "))",
);
imports.add("java.net.http.HttpRequest.BodyPublishers");
imports.add("java.nio.file.Paths");
@ -145,13 +145,13 @@ export function _toJava(requests: Request[], warnings: Warnings = []): string {
methodCallArgs.push(
"BodyPublishers.ofFile(Paths.get(" +
repr(request.dataArray[0].filename, imports) +
"))"
"))",
);
imports.add("java.net.http.HttpRequest.BodyPublishers");
imports.add("java.nio.file.Paths");
} else if (request.data) {
methodCallArgs.push(
"BodyPublishers.ofString(" + repr(request.data, imports) + ")"
"BodyPublishers.ofString(" + repr(request.data, imports) + ")",
);
imports.add("java.net.http.HttpRequest.BodyPublishers");
}
@ -268,7 +268,7 @@ export function _toJava(requests: Request[], warnings: Warnings = []): string {
}
export function toJavaWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const java = _toJava(requests, warnings);


@ -15,7 +15,7 @@ const supportedArgs = new Set([
export const _toJavaJsoup = (
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string => {
const request = getFirst(requests, warnings);
@ -161,7 +161,7 @@ export const _toJavaJsoup = (
};
export const toJavaJsoupWarn = (
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] => {
const requests = parse(curlCommand, supportedArgs, warnings);
const java = _toJavaJsoup(requests, warnings);


@ -18,7 +18,7 @@ const supportedArgs = new Set([
export function _toJavaOkHttp(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const url = request.urls[0];
@ -36,7 +36,9 @@ export function _toJavaOkHttp(
const clientLines = [];
if (request.timeout) {
clientLines.push(
" .callTimeout(" + request.timeout.toString() + ", TimeUnit.SECONDS)\n"
" .callTimeout(" +
request.timeout.toString() +
", TimeUnit.SECONDS)\n",
);
imports.add("java.util.concurrent.TimeUnit");
}
@ -44,7 +46,7 @@ export function _toJavaOkHttp(
clientLines.push(
" .connectTimeout(" +
request.connectTimeout.toString() +
", TimeUnit.SECONDS)\n"
", TimeUnit.SECONDS)\n",
);
imports.add("java.util.concurrent.TimeUnit");
}
@ -150,7 +152,7 @@ export function _toJavaOkHttp(
'"", ' + // TODO: this is the media type
"new File(" +
repr(m.contentFile, imports) +
"))"
"))",
);
imports.add("java.io.File");
} else {
@ -243,7 +245,7 @@ export function _toJavaOkHttp(
}
export function toJavaOkHttpWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const java = _toJavaOkHttp(requests, warnings);


@ -28,7 +28,7 @@ const supportedArgs = new Set([
// TODO: @
function _getDataString(
request: Request,
imports: JSImports
imports: JSImports,
): [string | null, string | null] {
if (!request.data) {
return [null, null];
@ -77,7 +77,7 @@ function _getDataString(
}
function getDataString(
request: Request,
imports: JSImports
imports: JSImports,
): [string | null, string | null] {
if (!request.data) {
return [null, null];
@ -102,7 +102,7 @@ function buildConfigObject(
methods: string[],
dataMethods: string[],
hasSearchParams: boolean,
imports: JSImports
imports: JSImports,
): string {
let code = "{\n";
@ -231,7 +231,7 @@ function buildConfigObject(
export function _toNodeAxios(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -361,7 +361,7 @@ export function _toNodeAxios(
methods,
dataMethods,
!!hasSearchParams,
imports
imports,
);
if (needsData) {
code += ",\n";
@ -384,7 +384,7 @@ export function _toNodeAxios(
}
export function toNodeAxiosWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const nodeAxios = _toNodeAxios(requests, warnings);


@ -47,7 +47,7 @@ const supportedArgs = new Set([
function getBodyString(
request: Request,
imports: JSImports
imports: JSImports,
): [string | null, string | null] {
const contentType = request.headers.getContentType();
// can have things like ; charset=utf-8 which we want to preserve
@ -112,7 +112,7 @@ function buildOptionsObject(
methods: string[],
nonDataMethods: string[],
warnings: Warnings,
imports: JSImports
imports: JSImports,
): string {
let code = "{\n";
@ -129,7 +129,7 @@ function buildOptionsObject(
reprAsStringToStringDict(
request.urls[0].queryDict as [Word, Word][],
1,
imports
imports,
) +
",\n";
} else if (request.urls[0].queryList) {
@ -250,7 +250,7 @@ function buildOptionsObject(
export function _toNodeGot(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -336,7 +336,7 @@ export function _toNodeGot(
methods,
nonDataMethods,
warnings,
imports
imports,
);
}
@ -351,7 +351,7 @@ export function _toNodeGot(
}
export function toNodeGotWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const nodeGot = _toNodeGot(requests, warnings);


@ -25,7 +25,7 @@ const supportedArgs = new Set([
function _getDataString(
data: Word,
contentType: string | null | undefined,
imports: JSImports
imports: JSImports,
): [string, string | null] {
const originalStringRepr = repr(data, imports);
@ -58,7 +58,7 @@ function _getDataString(
export function getDataString(
data: Word,
contentType: string | null | undefined,
imports: JSImports
imports: JSImports,
): [string, string | null] {
let dataString: string | null = null;
let commentedOutDataString: string | null = null;
@ -66,7 +66,7 @@ export function getDataString(
[dataString, commentedOutDataString] = _getDataString(
data,
contentType,
imports
imports,
);
} catch {}
if (!dataString) {
@ -77,7 +77,7 @@ export function getDataString(
export function _toNodeHttp(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const imports: JSImports = [];
@ -108,7 +108,7 @@ export function _toNodeHttp(
[dataString, commentedOutDataString] = getDataString(
request.data,
contentType,
imports
imports,
);
} else if (request.multipartUploads) {
formString = getFormString(request.multipartUploads, imports);
@ -221,7 +221,7 @@ export function _toNodeHttp(
export function toNodeHttpWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toNodeHttp(requests, warnings);


@ -70,7 +70,7 @@ export function reprPairs(
indentLevel = 0,
indent = " ",
list = true,
imports: JSImports
imports: JSImports,
): string {
if (d.length === 0) {
return list ? "[]" : "{}";
@ -93,7 +93,7 @@ export function reprAsStringToStringDict(
d: [Word, Word][],
indentLevel = 0,
imports: JSImports,
indent = " "
indent = " ",
): string {
return reprPairs(d, indentLevel, indent, false, imports);
}
@ -102,7 +102,7 @@ export function reprAsStringTuples(
d: [Word, Word][],
indentLevel = 0,
imports: JSImports,
indent = " "
indent = " ",
): string {
return reprPairs(d, indentLevel, indent, true, imports);
}
@ -112,7 +112,7 @@ export function reprStringToStringList(
indentLevel = 0,
imports: JSImports,
indent = " ",
list = true
list = true,
): string {
if (d.length === 0) {
return list ? "[]" : "{}";
@ -228,7 +228,7 @@ export function reprImportsRequire(imports: JSImports): string {
for (const [name, from] of imports.sort(bySecondElem)) {
if (name.startsWith("* as ")) {
ret.push(
`const ${name.slice("* as ".length)} = require(${reprStr(from)});`
`const ${name.slice("* as ".length)} = require(${reprStr(from)});`,
);
} else if (name.includes(".")) {
ret.push(`const ${name} = require(${reprStr(from)}).${name};`);
@ -282,7 +282,7 @@ export function reprBrowser(w: Word, warnings: [string, string][]): string {
export function reprFetch(
w: Word,
isNode: boolean,
imports: JSImports
imports: JSImports,
): string {
if (!isNode) {
// TODO: warn
@ -333,7 +333,7 @@ export function bySecondElem(a: [string, string], b: [string, string]): number {
export function toURLSearchParams(
query: [QueryList, QueryDict | null | undefined],
imports: JSImports,
indent = 1
indent = 1,
): string {
const [queryList, queryDict] = query;
const queryObj =
@ -346,7 +346,7 @@ export function toURLSearchParams(
export function toDictOrURLSearchParams(
query: [QueryList, QueryDict | null | undefined],
imports: JSImports,
indent = 1
indent = 1,
): string {
const [queryList, queryDict] = query;
@ -354,7 +354,7 @@ export function toDictOrURLSearchParams(
return reprAsStringToStringDict(
queryDict as [Word, Word][],
indent,
imports
imports,
);
}
@ -370,7 +370,7 @@ export function toFormData(
imports: JSImports,
fetchImports: Set<string>,
warnings: Warnings,
isNode = true
isNode = true,
): string {
let code = "new FormData();\n";
for (const m of multipartUploads) {
@ -414,7 +414,7 @@ function getDataString(
request: Request,
data: Word,
isNode: boolean,
imports: JSImports
imports: JSImports,
): [string, string | null] {
const originalStringRepr = reprFetch(data, isNode, imports);
@ -446,7 +446,7 @@ function getDataString(
if (
eq(
request.headers.get("content-type"),
"application/x-www-form-urlencoded"
"application/x-www-form-urlencoded",
)
) {
request.headers.delete("content-type");
@ -466,7 +466,7 @@ function getDataString(
export function getData(
request: Request,
isNode: boolean,
imports: JSImports
imports: JSImports,
): [string, string | null] {
if (!request.dataArray) {
return ["", null];
@ -484,7 +484,7 @@ export function getData(
const parts = [];
const hasBinary = request.dataArray.some(
(d) => !(d instanceof Word) && d.filetype === "binary"
(d) => !(d instanceof Word) && d.filetype === "binary",
);
const encoding = hasBinary ? "" : ", 'utf-8'";
for (const d of request.dataArray) {
@ -513,14 +513,14 @@ export function getData(
"fs.readFileSync(" +
reprFetch(filename, isNode, imports) +
encoding +
")"
")",
);
} else {
// TODO: warn that file needs content
parts.push(
"new File([/* contents */], " +
reprFetch(filename, isNode, imports) +
")"
")",
);
}
}
@ -550,7 +550,7 @@ function requestToJavaScriptOrNode(
warnings: Warnings,
fetchImports: Set<string>,
imports: JSImports,
isNode: boolean
isNode: boolean,
): string {
warnIfPartsIgnored(request, warnings, {
multipleUrls: true,
@ -573,7 +573,7 @@ function requestToJavaScriptOrNode(
imports,
fetchImports,
warnings,
isNode
isNode,
);
code += "\n";
}
@ -582,7 +582,7 @@ function requestToJavaScriptOrNode(
const [dataString, commentedOutDataString] = getData(
request,
isNode,
imports
imports,
);
let fn = "fetch";
@ -759,14 +759,14 @@ function requestToJavaScriptOrNode(
export function _toJavaScriptOrNode(
requests: Request[],
warnings: Warnings,
isNode: boolean
isNode: boolean,
): string {
const fetchImports = new Set<string>();
const imports: JSImports = [];
const code = requests
.map((r) =>
requestToJavaScriptOrNode(r, warnings, fetchImports, imports, isNode)
requestToJavaScriptOrNode(r, warnings, fetchImports, imports, isNode),
)
.join("\n");
@ -793,7 +793,7 @@ export function _toJavaScriptOrNode(
export function _toJavaScript(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
return _toJavaScriptOrNode(requests, warnings, false);
}
@ -803,7 +803,7 @@ export function _toNode(requests: Request[], warnings: Warnings = []): string {
export function toJavaScriptWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, javaScriptSupportedArgs, warnings);
return [_toJavaScript(requests, warnings), warnings];
@ -815,7 +815,7 @@ export function toJavaScript(curlCommand: string | string[]): string {
export function toNodeWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, nodeSupportedArgs, warnings);
return [_toNode(requests, warnings), warnings];


@ -35,7 +35,7 @@ export function commentOut(s: string, indent = 0): string {
export function serializeQuery(
query: Query,
imports: JSImports
imports: JSImports,
): [string, boolean] {
const [queryList, queryDict] = query;
let code = "";
@ -83,7 +83,7 @@ function _getDataString(
data: Word,
contentType: string | null | undefined,
exactContentType: Word | null | undefined,
imports: JSImports
imports: JSImports,
): [Word | null | undefined, string, string | null, boolean] {
let traditional = false;
const originalStringRepr = repr(data, imports);
@ -126,7 +126,7 @@ export function getDataString(
data: Word,
contentType: string | null | undefined,
exactContentType: Word | null | undefined,
imports: JSImports
imports: JSImports,
): [Word | null | undefined, string, string | null, boolean] {
let dataString: string | null = null;
let commentedOutDataString: string | null = null;
@ -143,7 +143,7 @@ export function getDataString(
export function getFormString(
multipartUploads: FormParam[],
imports: JSImports
imports: JSImports,
): string {
let code = "const form = new FormData();\n";
for (const m of multipartUploads) {
@ -170,7 +170,7 @@ export function getFormString(
export function _toJavaScriptJquery(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const imports: JSImports = [];
@ -235,7 +235,7 @@ export function _toJavaScriptJquery(
[dataString, traditional] = serializeQuery(
[request.urls[0].queryList, request.urls[0].queryDict ?? null],
imports
imports,
);
dataString = indent(dataString);
url = request.urls[0].urlWithoutQueryList;
@ -349,7 +349,7 @@ export function _toJavaScriptJquery(
export function toJavaScriptJqueryWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const jquery = _toJavaScriptJquery(requests, warnings);


@ -38,7 +38,7 @@ const supportedArgs = new Set([
function getDataString(
request: Request,
data: Word,
imports: JSImports
imports: JSImports,
): [string, string | null] {
const originalStringRepr = "body: " + repr(data, imports);
@ -66,7 +66,7 @@ function getDataString(
if (
eq(
request.headers.get("content-type"),
"application/x-www-form-urlencoded"
"application/x-www-form-urlencoded",
)
) {
request.headers.delete("content-type");
@ -89,7 +89,7 @@ function getDataString(
export function getData(
request: Request,
isNode: boolean,
imports: JSImports
imports: JSImports,
): [string, string | null] {
if (!request.dataArray) {
return ["", null];
@ -107,7 +107,7 @@ export function getData(
const parts = [];
const hasBinary = request.dataArray.some(
(d) => !(d instanceof Word) && d.filetype === "binary"
(d) => !(d instanceof Word) && d.filetype === "binary",
);
const encoding = hasBinary ? "" : ", 'utf-8'";
for (const d of request.dataArray) {
@ -124,7 +124,7 @@ export function getData(
parts.push("fs.readFileSync(0" + encoding + ")");
} else {
parts.push(
"fs.readFileSync(" + repr(filename, imports) + encoding + ")"
"fs.readFileSync(" + repr(filename, imports) + encoding + ")",
);
}
addImport(imports, "* as fs", "fs");
@ -154,7 +154,7 @@ export function getData(
function requestToKy(
request: Request,
warnings: Warnings,
imports: JSImports
imports: JSImports,
): string {
warnIfPartsIgnored(request, warnings, {
multipleUrls: true,
@ -304,7 +304,7 @@ export function _toNodeKy(requests: Request[], warnings?: Warnings): string {
export function toNodeKyWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toNodeKy(requests, warnings);


@ -20,7 +20,7 @@ function requestToNodeRequest(
requestIndex: number,
definedVariables: Set<string>,
imports: JSImports,
warnings: Warnings = []
warnings: Warnings = [],
): string {
warnIfPartsIgnored(request, warnings);
@ -45,14 +45,14 @@ function requestToNodeRequest(
nodeRequestCode += defVar(
definedVariables,
"dataString",
repr(request.data, imports) + ";\n\n"
repr(request.data, imports) + ";\n\n",
);
}
nodeRequestCode += defVar(definedVariables, "options", "{\n");
const path = mergeWords(
request.urls[0].urlObj.path,
request.urls[0].urlObj.query
request.urls[0].urlObj.query,
);
if (path.toBool() && INVALID_PATH_REGEX.test(path.toString())) {
warnings.push([
@ -125,21 +125,21 @@ function defVar(variables: Set<string>, name: string, value: string): string {
export function _toNodeRequest(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const code = "var request = require('request');\n";
const definedVariables = new Set(["request"]);
const imports: JSImports = [];
const requestCode = requests.map((r, i) =>
requestToNodeRequest(r, i, definedVariables, imports, warnings)
requestToNodeRequest(r, i, definedVariables, imports, warnings),
);
return code + reprImportsRequire(imports) + "\n" + requestCode.join("\n\n");
}
export function toNodeRequestWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
warnings.unshift(["node-request", "the request package is deprecated"]);


@ -41,7 +41,7 @@ const supportedArgs = new Set([
function serializeQuery(
fn: "query" | "send",
query: Query,
imports: JSImports
imports: JSImports,
): string {
const [queryList, queryDict] = query;
let code = "";
@ -85,7 +85,7 @@ function _getDataString(
request: Request,
contentType: string | null | undefined,
exactContentType: Word | null | undefined,
imports: JSImports
imports: JSImports,
): [Word | null | undefined, string | null, string | null] {
if (!request.data) {
return [exactContentType, null, null];
@ -129,7 +129,7 @@ export function getDataString(
request: Request,
contentType: string | null | undefined,
exactContentType: Word | null | undefined,
imports: JSImports
imports: JSImports,
): [Word | null | undefined, string | null, string | null] {
if (!request.data) {
return [exactContentType, null, null];
@ -142,7 +142,7 @@ export function getDataString(
request,
contentType,
exactContentType,
imports
imports,
);
} catch {}
if (!dataString) {
@ -153,7 +153,7 @@ export function getDataString(
export function getFormString(
multipartUploads: FormParam[],
imports: JSImports
imports: JSImports,
): string {
let code = "";
for (const m of multipartUploads) {
@ -184,7 +184,7 @@ export function getFormString(
export function _toNodeSuperAgent(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const imports: JSImports = [];
@ -220,7 +220,7 @@ export function _toNodeSuperAgent(
request,
contentType,
exactContentType,
imports
imports,
);
} else if (request.multipartUploads) {
dataCode = getFormString(request.multipartUploads, imports);
@ -257,7 +257,7 @@ export function _toNodeSuperAgent(
code += serializeQuery(
"query",
[request.urls[0].queryList, request.urls[0].queryDict ?? null],
imports
imports,
);
}
@ -362,7 +362,7 @@ export function _toNodeSuperAgent(
export function toNodeSuperAgentWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toNodeSuperAgent(requests, warnings);


@ -24,7 +24,7 @@ const supportedArgs = new Set([
function _getDataString(
data: Word,
contentType: string | null | undefined,
imports: JSImports
imports: JSImports,
): [string, string | null] {
const originalStringRepr = repr(data, imports);
@ -52,7 +52,7 @@ function _getDataString(
export function getDataString(
data: Word,
contentType: string | null | undefined,
imports: JSImports
imports: JSImports,
): [string, string | null] {
let dataString: string | null = null;
let commentedOutDataString: string | null = null;
@ -60,7 +60,7 @@ export function getDataString(
[dataString, commentedOutDataString] = _getDataString(
data,
contentType,
imports
imports,
);
} catch {}
if (!dataString) {
@ -71,7 +71,7 @@ export function getDataString(
export function _toJavaScriptXHR(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const imports: JSImports = [];
@ -101,7 +101,7 @@ export function _toJavaScriptXHR(
const [dataString, commentedOutDataString] = getDataString(
request.data,
contentType,
imports
imports,
);
if (commentedOutDataString) {
code += "// const data = " + commentedOutDataString + ";\n";
@ -181,7 +181,7 @@ export function _toJavaScriptXHR(
export function toJavaScriptXHRWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toJavaScriptXHR(requests, warnings);


@ -65,7 +65,7 @@ type JSONOutput = {
};
function getDataString(
request: Request
request: Request,
): { [key: string]: string | string[] } | string {
if (!request.data) {
return {};
@ -87,13 +87,13 @@ function getDataString(
Array.isArray(param[1])
? param[1].map((v) => v.toString())
: param[1].toString(),
])
]),
);
}
if (parsedQuery) {
// .fromEntries() means we lose data when there are repeated keys
return Object.fromEntries(
parsedQuery.map((param) => [param[0].toString(), param[1].toString()])
parsedQuery.map((param) => [param[0].toString(), param[1].toString()]),
);
}
}
@ -103,7 +103,7 @@ function getDataString(
}
function getFilesString(
request: Request
request: Request,
):
| { files?: { [key: string]: string }; data?: { [key: string]: string } }
| undefined {
@ -135,7 +135,7 @@ function getFilesString(
export function _toJsonString(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -158,7 +158,7 @@ export function _toJsonString(
if (request.cookies) {
// TODO: repeated cookies
requestJson.cookies = Object.fromEntries(
request.cookies.map((c) => [c[0].toString(), c[1].toString()])
request.cookies.map((c) => [c[0].toString(), c[1].toString()]),
);
// Normally when a generator uses .cookies, it should delete it from
// headers, but users of the JSON output would expect to have all the
@ -180,7 +180,7 @@ export function _toJsonString(
request.urls[0].queryDict.map((q) => [
q[0].toString(),
Array.isArray(q[1]) ? q[1].map((qq) => qq.toString()) : q[1].toString(),
])
]),
);
}
@ -231,13 +231,13 @@ export function _toJsonString(
JSON.stringify(
Object.keys(requestJson).length ? requestJson : "{}",
null,
4
4,
) + "\n"
);
}
export function toJsonStringWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const json = _toJsonString(requests, warnings);


@ -91,7 +91,7 @@ export function repr(w: Word): string {
function jsonAsJulia(
obj: string | number | boolean | object | null,
indent = 0
indent = 0,
): string {
if (isLosslessNumber(obj)) {
// TODO: why is it undefined
@ -152,7 +152,7 @@ function jsonAsJulia(
}
default:
throw new CCError(
"unexpected object type that shouldn't appear in JSON: " + typeof obj
"unexpected object type that shouldn't appear in JSON: " + typeof obj,
);
}
}
@ -173,7 +173,7 @@ function formatData(request: Request, imports: Set<string>): [string, string] {
if (contentType === "application/json") {
try {
const jsonData = jsonParseLossless(
request.dataArray[0].toString()
request.dataArray[0].toString(),
) as any;
const result = jsonAsJulia(jsonData);
imports.add("JSON");
@ -267,7 +267,7 @@ export function _toJulia(requests: Request[], warnings: Warnings = []): string {
code +=
' "Authorization" => "Basic " * base64encode(' +
repr(
mergeWords(request.urls[0].auth[0], ":", request.urls[0].auth[1])
mergeWords(request.urls[0].auth[0], ":", request.urls[0].auth[1]),
) +
"),\n";
imports.add("Base64");
@ -387,7 +387,7 @@ export function _toJulia(requests: Request[], warnings: Warnings = []): string {
export function toJuliaWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toJulia(requests, warnings);


@ -83,7 +83,7 @@ export function repr(w: Word, imports: Set<string>): string {
export function _toKotlin(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const url = request.urls[0];
@ -101,7 +101,7 @@ export function _toKotlin(
if (request.timeout) {
// TODO: floats don't work here
clientLines.push(
" .callTimeout(" + request.timeout.toString() + ", TimeUnit.SECONDS)\n"
" .callTimeout(" + request.timeout.toString() + ", TimeUnit.SECONDS)\n",
);
imports.add("java.util.concurrent.TimeUnit");
}
@ -109,7 +109,7 @@ export function _toKotlin(
clientLines.push(
" .connectTimeout(" +
request.connectTimeout.toString() +
", TimeUnit.SECONDS)\n"
", TimeUnit.SECONDS)\n",
);
imports.add("java.util.concurrent.TimeUnit");
}
@ -238,7 +238,7 @@ export function _toKotlin(
if ("filename" in m && m.filename) {
args.push(repr(m.filename, imports));
args.push(
"File(" + repr(m.contentFile, imports) + ").asRequestBody()" // TODO: content type here
"File(" + repr(m.contentFile, imports) + ").asRequestBody()", // TODO: content type here
);
imports.add("java.io.File");
imports.add("okhttp3.RequestBody.Companion.asRequestBody");
@ -343,7 +343,7 @@ export function _toKotlin(
}
export function toKotlinWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const kotlin = _toKotlin(requests, warnings);


@ -198,7 +198,7 @@ export function _toLua(requests: Request[], warnings: Warnings = []): string {
export function toLuaWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const lua = _toLua(requests, warnings);


@ -80,7 +80,7 @@ function repr(w: Word | null) {
function setVariableValue(
outputVariable: string | null,
value: string,
termination?: string
termination?: string,
): string {
let result = "";
@ -100,7 +100,7 @@ function callFunction(
outputVariable: string | null,
functionName: string,
params: string | string[] | string[][],
termination?: string
termination?: string,
) {
let functionCall = functionName + "(";
if (Array.isArray(params)) {
@ -136,7 +136,7 @@ function addCellArray(
mapping: ([Word, Word] | [string, Word | string])[],
keysNotToQuote?: string[],
indentLevel = 1,
pairs?: boolean
pairs?: boolean,
) {
if (mapping.length === 0) return ""; // shouldn't happen
@ -187,7 +187,7 @@ function addCellArray(
function structify(
obj: number[] | string[] | { [key: string]: string } | string | number | null,
indentLevel?: number
indentLevel?: number,
) {
let response = "";
indentLevel = !indentLevel ? 1 : ++indentLevel;
@ -219,7 +219,7 @@ function structify(
if (Object.prototype.hasOwnProperty.call(obj, k)) {
if (!k[0].match(/[a-z]/i)) {
throw new CCError(
"MATLAB structs do not support keys starting with non-alphabet symbols"
"MATLAB structs do not support keys starting with non-alphabet symbols",
);
}
// recursive call to scan property


@ -33,10 +33,10 @@ function prepareHeaders(request: Request): string | null {
"@(x) Cookie(x{:})",
callFunction(null, "num2cell", ["cookies", "2"], ""),
],
""
"",
);
headerStrs.push(
callFunction(null, "field.CookieField", cookieFieldParams, "")
callFunction(null, "field.CookieField", cookieFieldParams, ""),
);
} else if (keyStr === "accept") {
const accepts = value.split(",");
@ -113,7 +113,7 @@ function prepareMultipartUploads(request: Request): string | null {
"",
1,
true,
!readsFile
!readsFile,
);
params.push([repr(m.name), fileProvider as string]); // TODO: can be a string[]
}
@ -138,7 +138,7 @@ function prepareDataProvider(
termination: string,
indentLevel: number,
isDataBinary = true,
isDataRaw = false
isDataRaw = false,
): string | string[] {
if (!isDataRaw && value.charAt(0) === "@") {
const filename = value.slice(1);
@ -194,14 +194,14 @@ function prepareData(request: Request) {
null,
"JSONProvider",
structify(jsonData, 1),
""
"",
);
}
} catch (e) {}
}
return ans;
})
}),
);
return callFunction("body", "FormProvider", data);
}
@ -212,7 +212,7 @@ function prepareData(request: Request) {
";",
0,
!!request.isDataBinary,
!!request.isDataRaw
!!request.isDataRaw,
);
if (!response) {
response = setVariableValue("body", repr(request.data));
@ -251,7 +251,7 @@ function prepareRequestMessage(request: Request): string {
"response",
"RequestMessage",
reqMessage,
callFunction(null, ".send", params)
callFunction(null, ".send", params),
),
];
@ -260,7 +260,7 @@ function prepareRequestMessage(request: Request): string {
export function toHTTPInterface(
request: Request,
warnings: Warnings
warnings: Warnings,
): [(string | string[] | null)[], Warnings] {
return [
[


@ -14,7 +14,7 @@ const supportedArgs = new Set([
export function _toMATLAB(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -28,7 +28,7 @@ export function _toMATLAB(
}
export function toMATLABWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const matlab = _toMATLAB(requests, warnings);


@ -18,7 +18,7 @@ import {
function isSupportedByWebServices(request: Request): boolean {
return (
["get", "post", "put", "delete", "patch"].includes(
request.urls[0].method.toLowerCase().toString()
request.urls[0].method.toLowerCase().toString(),
) &&
!request.multipartUploads &&
!request.insecure
@ -41,7 +41,7 @@ function setHeader(
headers: [Word, Word][],
header: Word,
value: Word,
lowercase: boolean
lowercase: boolean,
) {
headers.push([lowercase ? header.toLowerCase() : header, value]);
}
@ -65,13 +65,13 @@ function parseWebOptions(request: Request): Options {
options.Password = password;
} else {
const authHeader = `['Basic ' matlab.net.base64encode(${repr(
joinWords(request.urls[0].auth, ":")
joinWords(request.urls[0].auth, ":"),
)})]`;
setHeader(
headers,
new Word("Authorization"),
new Word(authHeader),
request.headers.lowercase
request.headers.lowercase,
);
preformattedHeaders.push("authorization");
}
@ -96,7 +96,7 @@ function parseWebOptions(request: Request): Options {
headers,
new Word("Cookie"),
new Word(cookieString),
request.headers.lowercase
request.headers.lowercase,
);
preformattedHeaders.push("cookie");
} else {
@ -149,7 +149,7 @@ function parseWebOptions(request: Request): Options {
options.HeaderFields = addCellArray(
headers,
preformattedHeaders,
indentLevel
indentLevel,
);
}
@ -165,7 +165,7 @@ function prepareOptions(request: Request, options: Options): string[] {
Object.entries(options),
["headerfields"],
1,
true
true,
);
lines.push(callFunction("options", "weboptions", pairValues));
@ -176,7 +176,7 @@ function prepareBasicURI(request: Request): string[] {
const response: string[] = [];
if (request.urls[0].queryList) {
response.push(
setVariableValue("baseURI", repr(request.urls[0].urlWithoutQueryList))
setVariableValue("baseURI", repr(request.urls[0].urlWithoutQueryList)),
);
response.push(setVariableValue("uri", `[baseURI '?' ${paramsString}]`));
} else {
@ -195,7 +195,7 @@ function prepareBasicData(request: Request): string | string[] {
let response: string | string[] = [];
if (request.data.charAt(0) === "@") {
response.push(
callFunction("body", "fileread", repr(request.data.slice(1)))
callFunction("body", "fileread", repr(request.data.slice(1))),
);
if (!request.isDataBinary) {
@ -238,7 +238,7 @@ function prepareWebCall(request: Request, options: Options): string[] {
export function toWebServices(
request: Request,
warnings: Warnings
warnings: Warnings,
): [(string | string[] | null)[], Warnings] {
let lines: (string | string[] | null)[] = [
"%% Web Access using Data Import and Export API",


@ -61,7 +61,7 @@ export function repr(w: Word): string {
args.push(
"[[[NSProcessInfo processInfo] environment] objectForKey:" +
reprStr(t.value) +
"]"
"]",
);
} else if (t.type === "command") {
// TODO: doesn't return the output
@ -93,7 +93,7 @@ const reservedHeaders = [
export function _toObjectiveC(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings, { dataReadsFile: true });
let code = "";
@ -138,7 +138,7 @@ export function _toObjectiveC(
if (request.urls[0].auth) {
warnings.push(["reserved-header", "Authorization is a reserved header"]);
headerLines.push(
' @"Authorization": [NSString stringWithFormat:@"Basic %@", base64Credentials]'
' @"Authorization": [NSString stringWithFormat:@"Basic %@", base64Credentials]',
);
}
if (headerLines.length) {
@ -208,7 +208,7 @@ export function _toObjectiveC(
parts.push(
"[NSString stringWithContentsOfFile:" +
repr(entry.filename) +
" encoding:NSUTF8StringEncoding error:nil];"
" encoding:NSUTF8StringEncoding error:nil];",
);
}
}
@ -286,7 +286,7 @@ export function _toObjectiveC(
export function toObjectiveCWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toObjectiveC(requests, warnings);


@ -157,7 +157,7 @@ export function _toOCaml(requests: Request[], warnings: Warnings = []): string {
export function toOCamlWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toOCaml(requests, warnings);


@ -169,15 +169,15 @@ export function _toPerl(requests: Request[], warnings: Warnings = []): string {
args.push(
'Authorization => "Basic " . MIME::Base64::encode(' +
repr(
mergeWords(request.urls[0].auth[0], ":", request.urls[0].auth[1])
mergeWords(request.urls[0].auth[0], ":", request.urls[0].auth[1]),
) +
")"
")",
);
}
}
if (request.urls[0].uploadFile) {
args.push(
"Content => read_file(" + repr(request.urls[0].uploadFile) + ")"
"Content => read_file(" + repr(request.urls[0].uploadFile) + ")",
);
} else if (request.data) {
// TODO: parseQueryString
@ -191,7 +191,7 @@ export function _toPerl(requests: Request[], warnings: Warnings = []): string {
} else if (!("filename" in m)) {
// TODO: use File::Slurp;
lines.push(
reprHashKey(m.name) + " => read_file(" + repr(m.contentFile) + ")"
reprHashKey(m.name) + " => read_file(" + repr(m.contentFile) + ")",
);
} else {
let line = reprHashKey(m.name) + " => [" + repr(m.contentFile);
@ -203,7 +203,7 @@ export function _toPerl(requests: Request[], warnings: Warnings = []): string {
}
args.push(
"Content => [\n " + lines.join(",\n ") + "\n ]"
"Content => [\n " + lines.join(",\n ") + "\n ]",
);
}
}
@ -227,7 +227,7 @@ export function _toPerl(requests: Request[], warnings: Warnings = []): string {
export function toPerlWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toPerl(requests, warnings);


@ -110,7 +110,7 @@ function jsonStrToPhp(obj: string, indent = 0): [string, boolean] {
export function _toPhpGuzzle(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
const url = request.urls[0].queryDict
@ -211,7 +211,7 @@ export function _toPhpGuzzle(
options += ` 'filename' => ${repr(m.filename)},\n`;
}
options += ` 'contents' => Psr7\\Utils::tryFopen(${repr(
m.contentFile
m.contentFile,
)}, 'r'),\n`;
imports.add("GuzzleHttp\\Psr7");
// TODO: set content type from file extension
@ -226,7 +226,7 @@ export function _toPhpGuzzle(
// TODO: remove some headers?
} else if (request.urls[0].uploadFile) {
options += ` 'body' => Psr7\\Utils::tryFopen(${repr(
request.urls[0].uploadFile
request.urls[0].uploadFile,
)}, 'r')\n`;
imports.add("GuzzleHttp\\Psr7");
} else if (request.data) {
@ -356,7 +356,7 @@ export function _toPhpGuzzle(
export function toPhpGuzzleWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const guzzle = _toPhpGuzzle(requests, warnings);


@ -228,7 +228,7 @@ export function _toPhp(requests: Request[], warnings: Warnings = []): string {
export function toPhpWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const php = _toPhp(requests, warnings);


@ -12,7 +12,7 @@ const supportedArgs = new Set([
export function _toPhpRequests(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -92,7 +92,7 @@ export function _toPhpRequests(
}
export function toPhpRequestsWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const php = _toPhpRequests(requests, warnings);


@ -109,7 +109,7 @@ function requestToPowershell(
request: Request,
url: RequestUrl,
restMethod: boolean,
warnings: Warnings
warnings: Warnings,
): string {
let code = "";
const command = restMethod ? "Invoke-RestMethod" : "Invoke-WebRequest";
@ -357,7 +357,7 @@ function requestToPowershell(
function toPowershell(
requests: Request[],
restMethod = true,
warnings: Warnings = []
warnings: Warnings = [],
): string {
const commands = [];
@ -393,14 +393,14 @@ function toPowershell(
export function _toPowershellWebRequest(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
return toPowershell(requests, false, warnings);
}
export function toPowershellWebRequestWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toPowershellWebRequest(requests, warnings);
@ -412,14 +412,14 @@ export function toPowershellWebRequest(curlCommand: string | string[]): string {
export function _toPowershellRestMethod(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
return toPowershell(requests, true, warnings);
}
export function toPowershellRestMethodWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toPowershellRestMethod(requests, warnings);


@ -21,7 +21,7 @@ const supportedArgs = new Set([
export function _toPythonHttp(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const request = getFirst(requests, warnings);
@ -80,7 +80,7 @@ export function _toPythonHttp(
[dataAsJson, jsonRoundtrips] = formatDataAsJson(
request.dataArray[0],
imports,
osVars
osVars,
);
}
if (dataAsJson) {
@ -93,7 +93,7 @@ export function _toPythonHttp(
repr(
mergeWords(request.urls[0].urlObj.path, request.urls[0].urlObj.query),
osVars,
imports
imports,
),
];
if (dataAsJson) {
@ -114,7 +114,7 @@ export function _toPythonHttp(
args.push(
"open(" +
repr(request.urls[0].uploadFile, osVars, imports, false, true) +
", 'rb')"
", 'rb')",
);
}
} else if (
@ -126,7 +126,7 @@ export function _toPythonHttp(
args.push(
"open(" +
repr(request.dataArray[0].filename, osVars, imports, false, true) +
", 'rb')"
", 'rb')",
);
} else if (request.data) {
args.push(repr(request.data, osVars, imports));
@ -156,7 +156,7 @@ export function _toPythonHttp(
export function toPythonHttpWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toPythonHttp(requests, warnings);


@ -201,7 +201,7 @@ export function repr(
// os.getenv('MYVAR') returns None if MYVAR is not set
// os.getenv('MYVAR', '') returns '' if MYVAR is not set but it's a bit more verbose,
// so setting errorOk to true will use the shorter version
errorOk = false
errorOk = false,
): string {
const reprFn = binary ? reprStrBinary : reprStr;
const reprs = [];
@ -261,7 +261,7 @@ function reprb(word: Word, osVars: OSVars, imports: Set<string>): string {
export function asFloat(
word: Word,
osVars: OSVars,
imports: Set<string>
imports: Set<string>,
): string {
if (word.isString()) {
// TODO: check it's actually a valid float
@ -273,7 +273,7 @@ export function asFloat(
export function asInt(
word: Word,
osVars: OSVars,
imports: Set<string>
imports: Set<string>,
): string {
if (word.isString()) {
// TODO: check it's actually a valid int
@ -423,14 +423,14 @@ function jsonDumps(obj: string | number | boolean | object | null): string {
);
default:
throw new CCError(
"unexpected object type that shouldn't appear in JSON: " + typeof obj
"unexpected object type that shouldn't appear in JSON: " + typeof obj,
);
}
}
function objToPython(
obj: string | number | boolean | object | null,
indent = 0
indent = 0,
): string {
if (isLosslessNumber(obj)) {
const numAsStr = jsonStringifyLossless(obj) as string;
@ -491,7 +491,7 @@ function objToPython(
}
default:
throw new CCError(
"unexpected object type that shouldn't appear in JSON: " + typeof obj
"unexpected object type that shouldn't appear in JSON: " + typeof obj,
);
}
}
@ -500,7 +500,7 @@ export function formatHeaders(
headers: Headers,
commentedOutHeaders: { [key: string]: string },
osVars: OSVars,
imports: Set<string>
imports: Set<string>,
): string {
// TODO: what if there are repeat headers
let headerDict = "headers = {\n";
@ -553,7 +553,7 @@ function decodePercentEncoding(s: Word): Word | null {
}
function dataEntriesToDict(
dataEntries: Array<[string, string]>
dataEntries: Array<[string, string]>,
): { [key: string]: Array<string> } | null {
// Group keys
// TODO: because keys can be code that reads from a file, those should not be considered the
@ -622,7 +622,7 @@ function formatDataAsEntries(
dataArray: DataParam[],
osVars: OSVars,
imports: Set<string>,
variableName: "data" | "params" = "data"
variableName: "data" | "params" = "data",
): [string, string] | null {
// This code is more complicated than you might expect because it needs
// to handle a --data-urlencode that reads from a file followed by --json
@ -779,7 +779,7 @@ function formatDataAsStr(
dataArray: DataParam[],
imports: Set<string>,
osVars: OSVars,
variableName: "data" | "params" = "data"
variableName: "data" | "params" = "data",
): [string, boolean] {
// If one of the arguments has to be binary, then they all have to be binary
// because we can't mix bytes and str.
@ -787,7 +787,7 @@ function formatDataAsStr(
// --data-binary @filename
// otherwise we could generate code that will try to read an image file as text and error.
const binary = dataArray.some(
(d) => !(d instanceof Word) && d.filetype === "binary"
(d) => !(d instanceof Word) && d.filetype === "binary",
);
const reprFunc = binary ? reprb : repr;
const prefix = binary ? "b" : "";
@ -889,7 +889,7 @@ function formatDataAsStr(
export function formatDataAsJson(
d: DataParam,
imports: Set<string>,
osVars: OSVars
osVars: OSVars,
): [string | null, boolean] {
if (d instanceof Word) {
if (!d.isString()) {
@ -942,7 +942,7 @@ export function formatDataAsJson(
function getDataString(
request: Request,
osVars: OSVars,
warnings: Warnings
warnings: Warnings,
): [string | null, boolean | null, string | null, Set<string>] {
const imports = new Set<string>();
if (!request.data || !request.dataArray) {
@ -974,7 +974,7 @@ function getDataString(
[dataAsJson, jsonRoundtrips] = formatDataAsJson(
request.dataArray[0],
imports,
osVars
osVars,
);
}
if (jsonRoundtrips) {
@ -992,7 +992,7 @@ function getDataString(
if (
eq(
request.headers.get("content-type"),
"application/x-www-form-urlencoded"
"application/x-www-form-urlencoded",
) &&
request.headers.length === 1
) {
@ -1013,7 +1013,7 @@ function getDataString(
const [dataAsString, shouldEncode] = formatDataAsStr(
request.dataArray,
imports,
osVars
osVars,
);
return [dataAsString, shouldEncode, dataAsJson, imports];
}
@ -1021,7 +1021,7 @@ function getDataString(
function getFilesString(
request: Request,
osVars: OSVars,
imports: Set<string>
imports: Set<string>,
): [string, boolean] {
let usesStdin = false;
if (!request.multipartUploads) {
@ -1070,7 +1070,7 @@ function getFilesString(
tuple.push(
"open(" +
repr(m.contentFile, osVars, imports, false, true) +
", 'rb')"
", 'rb')",
);
}
} else {
@ -1127,7 +1127,7 @@ function commentOut(s: string) {
function uniqueWarn(
seenWarnings: Set<string>,
warnings: Warnings,
warning: [string, string]
warning: [string, string],
) {
if (!seenWarnings.has(warning[0])) {
seenWarnings.add(warning[0]);
@ -1152,7 +1152,7 @@ function requestToPython(
request: Request,
warnings: Warnings = [],
imports: Set<string>,
thirdPartyImports: Set<string>
thirdPartyImports: Set<string>,
): string {
const osVars: OSVars = {};
const commentedOutHeaders: { [key: string]: string } = {
@ -1285,7 +1285,7 @@ function requestToPython(
paramArray,
osVars,
imports,
"params"
"params",
);
if (queryAsEntries !== null) {
let percentWarn;
@ -1304,7 +1304,7 @@ function requestToPython(
paramArray,
imports,
osVars,
"params"
"params",
);
}
}
@ -1336,7 +1336,7 @@ function requestToPython(
[dataString, shouldEncode, jsonDataString, dataImports] = getDataString(
request,
osVars,
warnings
warnings,
);
dataImports.forEach(imports.add, imports);
// Remove "Content-Type" from the headers dict
@ -1377,7 +1377,7 @@ function requestToPython(
request.headers,
commentedOutHeaders,
osVars,
imports
imports,
);
}
@ -1489,7 +1489,7 @@ function requestToPython(
urlObj.queryArray,
osVars,
imports,
"params"
"params",
);
if (urlQueryAsEntries !== null) {
let percentWarn;
@ -1509,7 +1509,7 @@ function requestToPython(
urlObj.queryArray,
imports,
osVars,
"params"
"params",
);
url = urlObj.urlWithoutQueryArray;
}
@ -1629,7 +1629,7 @@ function requestToPython(
"--aws-sigv4",
"--aws-sigv4 value isn't parsed: " +
JSON.stringify(
request.awsSigV4 ? request.awsSigV4.toString() : ""
request.awsSigV4 ? request.awsSigV4.toString() : "",
),
]);
break;
@ -1832,7 +1832,7 @@ export function printImports(imps: Set<string>): string {
export function _toPython(
requests: Request[],
warnings: Warnings = []
warnings: Warnings = [],
): string {
const code = [];
let joinTwoLines = false;
@ -1843,7 +1843,7 @@ export function _toPython(
request,
warnings,
imports,
thirdPartyImports
thirdPartyImports,
);
code.push(requestCode);
@ -1870,7 +1870,7 @@ export function _toPython(
export function toPythonWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const python = _toPython(requests, warnings);


@ -203,7 +203,7 @@ export function _toR(requests: Request[], warnings: Warnings = []): string {
// TODO: GET() and HEAD() don't support sending data, detect and use VERB() instead
if (
["GET", "HEAD", "PATCH", "PUT", "DELETE", "POST"].includes(
request.urls[0].method.toString()
request.urls[0].method.toString(),
)
) {
requestLine += request.urls[0].method.toString() + "(";
@ -271,7 +271,7 @@ export function _toR(requests: Request[], warnings: Warnings = []): string {
}
export function toRWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const rHttr = _toR(requests, warnings);


@ -43,8 +43,8 @@ export function reprStr(s: string, quote?: "'" | '"' | "{}"): string {
quote === "'"
? regexSingleEscape
: quote === '"'
? regexDoubleEscape
: regexCurlyEscape;
? regexDoubleEscape
: regexCurlyEscape;
const startQuote = quote[0];
const endQuote = quote === "{}" ? quote[1] : quote[0];
@ -117,7 +117,7 @@ function repr(w: Word): string {
function objToRuby(
obj: Word | Word[] | string | number | boolean | object | null,
indent = 0
indent = 0,
): string {
if (obj instanceof Word) {
return repr(obj);
@ -165,7 +165,7 @@ function objToRuby(
}
default:
throw new CCError(
"unexpected object type that shouldn't appear in JSON: " + typeof obj
"unexpected object type that shouldn't appear in JSON: " + typeof obj,
);
}
}
@ -332,7 +332,7 @@ function getFilesString(request: Request): string {
function requestToRuby(
request: Request,
warnings: Warnings,
imports: Set<string>
imports: Set<string>,
): string {
warnIfPartsIgnored(request, warnings, { dataReadsFile: true });
if (
@ -461,7 +461,7 @@ function requestToRuby(
for (const [headerName, headerValue] of request.headers) {
if (
["accept-encoding", "content-length"].includes(
headerName.toLowerCase().toString()
headerName.toLowerCase().toString(),
)
) {
code += "# ";
@ -544,7 +544,7 @@ export function _toRuby(requests: Request[], warnings: Warnings = []): string {
export function toRubyWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const ruby = _toRuby(requests, warnings);


@ -89,9 +89,9 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
indent(
`headers.insert(${name}, ${repr(
headerValue,
imports
)}.parse().unwrap());`
)
imports,
)}.parse().unwrap());`,
),
);
}
}
@ -104,12 +104,12 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
if ("contentFile" in m) {
return indent(
`.file(${repr(m.name, imports)}, ${repr(m.contentFile, imports)})?`,
2
2,
);
}
return indent(
`.text(${repr(m.name, imports)}, ${repr(m.content, imports)})`,
2
2,
);
});
parts[parts.length - 1] += ";";
@ -130,8 +130,8 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
lines.push(
indent(
".redirect(reqwest::redirect::Policy::custom(|attempt| { attempt.follow() }))",
2
)
2,
),
);
} else {
// Insert the --max-redirs value as-is, hoping it's a valid integer
@ -140,8 +140,8 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
".redirect(reqwest::redirect::Policy::limited(" +
request.maxRedirects.trim().toString() +
"))",
2
)
2,
),
);
}
lines.push(indent(".build()", 2));
@ -154,18 +154,18 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
indent(
`let res = client.${request.urls[0].method.toLowerCase()}(${repr(
request.urls[0].url,
imports
)})`
)
imports,
)})`,
),
);
} else {
lines.push(
indent(
`let res = client.request(${repr(
request.urls[0].method,
imports
)}, ${repr(request.urls[0].url, imports)})`
)
imports,
)}, ${repr(request.urls[0].url, imports)})`,
),
);
}
@ -174,8 +174,8 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
lines.push(
indent(
`.basic_auth(${repr(user, imports)}, Some(${repr(password, imports)}))`,
2
)
2,
),
);
}
@ -194,7 +194,7 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
indent('.body(r#"', 2),
request.data.toString(), // TODO: this is wrong
'"#',
indent(")", 2)
indent(")", 2),
);
} else {
lines.push(indent(`.body(${repr(request.data, imports)})`, 2));
@ -207,7 +207,7 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
indent('println!("{}", res);'),
"",
indent("Ok(())"),
"}"
"}",
);
const preambleLines = ["extern crate reqwest;"];
@ -234,7 +234,7 @@ export function _toRust(requests: Request[], warnings: Warnings = []): string {
}
export function toRustWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const rust = _toRust(requests, warnings);


@ -38,7 +38,7 @@ export function repr(w: Word): string {
args.push(reprStr(t));
} else if (t.type === "variable") {
args.push(
"(ProcessInfo.processInfo.environment[" + reprStr(t.value) + '] ?? "")'
"(ProcessInfo.processInfo.environment[" + reprStr(t.value) + '] ?? "")',
);
} else {
args.push("exec(" + reprStr(t.value) + ")");
@ -224,7 +224,7 @@ export function _toSwift(requests: Request[], warnings: Warnings = []): string {
export function toSwiftWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const code = _toSwift(requests, warnings);


@ -231,7 +231,7 @@ function requestToWget(request: Request, warnings: Warnings): string {
}
if (request.urls.length > 1) {
const uniqueMethods = new Set<string>(
request.urls.map((u) => u.method.toString())
request.urls.map((u) => u.method.toString()),
);
// TODO: add tons of checks/warnings that wget doesn't let you set things per-URL
@ -269,7 +269,7 @@ function requestToWget(request: Request, warnings: Warnings): string {
if (request.headers.length) {
for (const [headerName, headerValue] of request.headers) {
args.push(
"--header=" + repr(mergeWords(headerName, ": ", headerValue ?? ""))
"--header=" + repr(mergeWords(headerName, ": ", headerValue ?? "")),
);
// TODO: there's also --referer, --user-agent and --content-disposition
}
@ -307,7 +307,7 @@ function requestToWget(request: Request, warnings: Warnings): string {
if (
!["none", "basic", "digest", "ntlm", "ntlm-wb", "negotiate"].includes(
request.authType
request.authType,
)
) {
warnings.push([
@ -521,7 +521,7 @@ export function _toWget(requests: Request[], warnings: Warnings = []): string {
export function toWgetWarn(
curlCommand: string | string[],
warnings: Warnings = []
warnings: Warnings = [],
): [string, Warnings] {
const requests = parse(curlCommand, supportedArgs, warnings);
const wget = _toWget(requests, warnings);


@ -25,7 +25,7 @@ export function clip(s: string, maxLength = 30): string {
function findCommands(
curlCommand: string | string[],
warnings: Warnings
warnings: Warnings,
): [Word[], Word?, Word?][] {
if (typeof curlCommand === "string") {
return tokenize(curlCommand, warnings);
@ -37,7 +37,7 @@ function findCommands(
if (curlCommand[0].trim() !== "curl") {
throw new CCError(
'command should begin with "curl" but instead begins with ' +
JSON.stringify(clip(curlCommand[0]))
JSON.stringify(clip(curlCommand[0])),
);
}
return [[curlCommand.map((arg) => new Word(arg)), undefined, undefined]];
@ -52,7 +52,7 @@ function findCommands(
export function parse(
command: string | string[],
supportedArgs?: Set<string>,
warnings: Warnings = []
warnings: Warnings = [],
): Request[] {
let requests: Request[] = [];
const curlCommands = findCommands(command, warnings);
@ -63,7 +63,7 @@ export function parse(
curlLongOptsShortened,
curlShortOpts,
supportedArgs,
warnings
warnings,
);
requests = requests.concat(buildRequests(globalConfig, stdin, stdinFile));


@ -331,13 +331,13 @@ export class Word implements Iterable<Token> {
toLowerCase(): Word {
return new Word(
this.tokens.map((t) => (typeof t === "string" ? t.toLowerCase() : t))
this.tokens.map((t) => (typeof t === "string" ? t.toLowerCase() : t)),
);
}
toUpperCase(): Word {
return new Word(
this.tokens.map((t) => (typeof t === "string" ? t.toUpperCase() : t))
this.tokens.map((t) => (typeof t === "string" ? t.toUpperCase() : t)),
);
}
@ -472,7 +472,7 @@ export class Word implements Iterable<Token> {
export function eq(
it: Word | undefined | null,
other: string | Word | undefined | null
other: string | Word | undefined | null,
): boolean {
if (
it === undefined ||


@ -63,7 +63,7 @@ function removeAnsiCBackslashes(str: string): string {
throw new CCError(
'non-ASCII control character in ANSI-C quoted string: "\\u{' +
m.codePointAt(2)!.toString(16) +
'}"'
'}"',
);
}
// If this produces a 0x00 (null) character, it will cause bash to
@ -72,7 +72,7 @@ function removeAnsiCBackslashes(str: string): string {
return m[2] === "?"
? "\x7F"
: String.fromCodePoint(
m[2].toUpperCase().codePointAt(0)! & 0b00011111
m[2].toUpperCase().codePointAt(0)! & 0b00011111,
);
case "x":
case "u":
@ -93,7 +93,7 @@ function removeAnsiCBackslashes(str: string): string {
default:
// There must be a mis-match between ANSI_BACKSLASHES and the switch statement
throw new CCError(
"unhandled character in ANSI-C escape code: " + JSON.stringify(m)
"unhandled character in ANSI-C escape code: " + JSON.stringify(m),
);
}
}
@ -104,7 +104,7 @@ function removeAnsiCBackslashes(str: string): string {
function toTokens(
node: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): Token[] {
let vals: Token[] = [];
switch (node.type) {
@ -123,7 +123,7 @@ function toTokens(
let res = "";
for (const child of node.namedChildren) {
res += removeDoubleQuoteBackslashes(
node.text.slice(prevEnd, child.startIndex - node.startIndex)
node.text.slice(prevEnd, child.startIndex - node.startIndex),
);
// expansion, simple_expansion or command_substitution (or concat?)
const subVal = toTokens(child, curlCommand, warnings);
@ -242,7 +242,7 @@ function toTokens(
"unexpected argument type " +
JSON.stringify(node.type) +
'. Must be one of "word", "string", "raw_string", "ansi_c_string", "expansion", "simple_expansion", "translated_string" or "concatenation"\n' +
underlineNode(node, curlCommand)
underlineNode(node, curlCommand),
);
}
}
@ -250,7 +250,7 @@ function toTokens(
function toWord(
node: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): Word {
return new Word(toTokens(node, curlCommand, warnings));
}
@ -258,7 +258,7 @@ function toWord(
function warnAboutErrorNodes(
ast: Parser.Tree,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
) {
// TODO: get only named children?
const cursor = ast.walk();
@ -286,7 +286,7 @@ function warnAboutErrorNodes(
function warnAboutUselessBackslash(
n: Parser.SyntaxNode,
curlCommandLines: string[],
warnings: Warnings
warnings: Warnings,
) {
const lastCommandLine = curlCommandLines[n.endPosition.row];
const impromperBackslash = lastCommandLine.match(/\\\s+$/);
@ -312,7 +312,7 @@ function warnAboutUselessBackslash(
function extractRedirect(
node: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): [Parser.SyntaxNode, Word?, Word?] {
if (!node.childCount) {
throw new CCError('got empty "redirected_statement" AST node');
@ -325,12 +325,12 @@ function extractRedirect(
'got "redirected_statement" AST node whose first child is not a "command", got ' +
command.type +
" instead\n" +
underlineNode(command, curlCommand)
underlineNode(command, curlCommand),
);
}
if (node.childCount < 2) {
throw new CCError(
'got "redirected_statement" AST node with only one child - no redirect'
'got "redirected_statement" AST node with only one child - no redirect',
);
}
if (redirects.length > 1) {
@ -351,14 +351,14 @@ function extractRedirect(
// https://github.com/tree-sitter/tree-sitter-bash/issues/118
if (redirect.namedChildCount < 1) {
throw new CCError(
'got "redirected_statement" AST node with heredoc but no heredoc start'
'got "redirected_statement" AST node with heredoc but no heredoc start',
);
}
const heredocStart = redirect.namedChildren[0].text;
const heredocBody = node.nextNamedSibling;
if (!heredocBody) {
throw new CCError(
'got "redirected_statement" AST node with no heredoc body'
'got "redirected_statement" AST node with no heredoc body',
);
}
// TODO: herestrings and heredocs are different
@ -366,7 +366,7 @@ function extractRedirect(
throw new CCError(
'got "redirected_statement" AST node with heredoc but no heredoc body, got ' +
heredocBody.type +
" instead"
" instead",
);
}
// TODO: heredocs can do variable expansion and stuff
@ -379,7 +379,7 @@ function extractRedirect(
} else if (redirect.type === "herestring_redirect") {
if (redirect.namedChildCount < 1 || !redirect.firstNamedChild) {
throw new CCError(
'got "redirected_statement" AST node with empty herestring'
'got "redirected_statement" AST node with empty herestring',
);
}
// TODO: this just converts bash code to text
@ -388,7 +388,7 @@ function extractRedirect(
throw new CCError(
'got "redirected_statement" AST node whose second child is not one of "file_redirect", "heredoc_redirect" or "herestring_redirect", got ' +
command.type +
" instead"
" instead",
);
}
return [command, stdin, stdinFile];
@ -397,7 +397,7 @@ function extractRedirect(
function _findCurlInPipeline(
node: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): [Parser.SyntaxNode?, Word?, Word?] {
let command, stdin, stdinFile;
for (const child of node.namedChildren) {
@ -408,7 +408,7 @@ function _findCurlInPipeline(
'got "command" AST node whose first child is not a "command_name", got ' +
commandName.type +
" instead\n" +
underlineNode(commandName, curlCommand)
underlineNode(commandName, curlCommand),
);
}
const commandNameWord = commandName.namedChildren[0];
@ -417,7 +417,7 @@ function _findCurlInPipeline(
'got "command_name" AST node whose first child is not a "word", got ' +
commandNameWord.type +
" instead\n" +
underlineNode(commandNameWord, curlCommand)
underlineNode(commandNameWord, curlCommand),
);
}
if (commandNameWord.text === "curl") {
@ -435,7 +435,7 @@ function _findCurlInPipeline(
const [redirCommand, redirStdin, redirStdinFile] = extractRedirect(
child,
curlCommand,
warnings
warnings,
);
if (redirCommand.namedChildren[0].text === "curl") {
if (!command) {
@ -458,7 +458,7 @@ function _findCurlInPipeline(
const [nestedCommand, nestedStdin, nestedStdinFile] = _findCurlInPipeline(
child,
curlCommand,
warnings
warnings,
);
if (!nestedCommand) {
continue;
@ -488,17 +488,17 @@ function _findCurlInPipeline(
function findCurlInPipeline(
node: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): [Parser.SyntaxNode, Word?, Word?] {
const [command, stdin, stdinFile] = _findCurlInPipeline(
node,
curlCommand,
warnings
warnings,
);
if (!command) {
throw new CCError(
"could not find curl command in pipeline\n" +
underlineNode(node, curlCommand)
underlineNode(node, curlCommand),
);
}
return [command, stdin, stdinFile];
@ -509,7 +509,7 @@ function findCurlInPipeline(
function extractCommandNodes(
ast: Parser.Tree,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): [Parser.SyntaxNode, Word?, Word?][] {
// https://github.com/tree-sitter/tree-sitter-bash/blob/master/grammar.js
// The AST must be in a nice format, i.e.
@ -538,7 +538,7 @@ function extractCommandNodes(
// TODO: expand "AST" acronym the first time it appears in an error message
'expected a "program" top-level AST node, got ' +
ast.rootNode.type +
" instead"
" instead",
);
}
@ -573,7 +573,7 @@ function extractCommandNodes(
case "ERROR":
throw new CCError(
`Bash parsing error on line ${n.startPosition.row + 1}:\n` +
underlineNode(n, curlCommand)
underlineNode(n, curlCommand),
);
default:
// TODO: better error message.
@ -581,7 +581,7 @@ function extractCommandNodes(
"found " +
JSON.stringify(n.type) +
' AST node, only "command", "pipeline" or "redirected_statement" are supported\n' +
underlineNode(n, curlCommand)
underlineNode(n, curlCommand),
);
}
}
@ -590,7 +590,7 @@ function extractCommandNodes(
// We would probably need to keep track of the node types we've seen.
throw new CCError(
'expected a "command" or "redirected_statement" AST node' +
(sawComment ? ', only found "comment" nodes' : "")
(sawComment ? ', only found "comment" nodes' : ""),
);
}
@ -600,12 +600,12 @@ function extractCommandNodes(
function toNameAndArgv(
command: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): [Parser.SyntaxNode, Parser.SyntaxNode[]] {
if (command.childCount < 1) {
// TODO: better error message.
throw new CCError(
'empty "command" node\n' + underlineNode(command, curlCommand)
'empty "command" node\n' + underlineNode(command, curlCommand),
);
}
@ -630,7 +630,7 @@ function toNameAndArgv(
'expected "command_name", "variable_assignment" or "file_redirect" AST node, found ' +
n.type +
" instead\n" +
underlineNode(n, curlCommand)
underlineNode(n, curlCommand),
);
}
break;
@ -643,7 +643,7 @@ function toNameAndArgv(
if (name === undefined) {
throw new CCError(
'found "command" AST node with no "command_name" child\n' +
underlineNode(command, curlCommand)
underlineNode(command, curlCommand),
);
}
@ -654,11 +654,12 @@ function toNameAndArgv(
function nameToWord(
name: Parser.SyntaxNode,
curlCommand: string,
warnings: Warnings
warnings: Warnings,
): Word {
if (name.childCount < 1 || !name.firstChild) {
throw new CCError(
'found empty "command_name" AST node\n' + underlineNode(name, curlCommand)
'found empty "command_name" AST node\n' +
underlineNode(name, curlCommand),
);
} else if (name.childCount > 1) {
warnings.push([
@ -682,7 +683,7 @@ function nameToWord(
"expected command name to be a simple value but found a " +
cmdNameShellToken.type +
"\n" +
underlineNode(cmdNameShellToken.syntaxNode, curlCommand)
underlineNode(cmdNameShellToken.syntaxNode, curlCommand),
);
}
} else if (nameWordStr.trim() !== "curl") {
@ -690,14 +691,14 @@ function nameToWord(
if (!c) {
throw new CCError(
"found command without a command_name\n" +
underlineNode(nameNode, curlCommand)
underlineNode(nameNode, curlCommand),
);
}
throw new CCError(
'command should begin with "curl" but instead begins with ' +
JSON.stringify(clip(c)) +
"\n" +
underlineNode(nameNode, curlCommand)
underlineNode(nameNode, curlCommand),
);
}
return nameWord;
@ -705,7 +706,7 @@ function nameToWord(
export function tokenize(
curlCommand: string,
warnings: Warnings = []
warnings: Warnings = [],
): [Word[], Word?, Word?][] {
const ast = parser.parse(curlCommand);
warnAboutErrorNodes(ast, curlCommand, warnings);


@ -6,7 +6,7 @@ export const UTF8encoder = new TextEncoder();
// TODO: replace with Object.hasOwn() once Node 16 is EOL'd on 2023-09-11
export function has<T, K extends PropertyKey>(
obj: T,
prop: K
prop: K,
): obj is T & Record<K, unknown> {
return Object.prototype.hasOwnProperty.call(obj, prop);
}


@ -21,7 +21,7 @@ function stringifyWords(o: any): any {
Object.entries(o).map((oo) => [
stringifyWords(oo[0]),
stringifyWords(oo[1]),
])
]),
);
}
return o;
@ -251,7 +251,7 @@ type Converter = keyof typeof converters;
// Check that we have at least one test for every generator
// https://github.com/curlconverter/curlconverter/pull/299
const testedConverters = Object.entries(converters).map(
(c) => c[1].converter.name
(c) => c[1].converter.name,
);
const untestedConverters = ["toPhpRequests"];
const notConverterExports = ["Word"];
@ -264,17 +264,17 @@ const missing = availableConverters.filter(
!testedConverters.includes(c) &&
!untestedConverters.includes(c) &&
!notConverterExports.includes(c) &&
!c.endsWith("Warn")
!c.endsWith("Warn"),
);
const extra = testedConverters.filter(
(c) => !availableConverters.includes(c) && c !== "toParser"
(c) => !availableConverters.includes(c) && c !== "toParser",
);
if (missing.length) {
console.error("these converters are not tested: " + missing.join(", "));
}
if (extra.length) {
console.error(
"these non-existant converters are being tested: " + extra.join(", ")
"these non-existant converters are being tested: " + extra.join(", "),
);
}
for (const [converterName, converter] of Object.entries(converters)) {
@ -291,12 +291,12 @@ for (const [converterName, converter] of Object.entries(converters)) {
testDir +
" doesn't have any files ending with '" +
converter.extension +
"'"
"'",
);
}
} else {
console.error(
converterName + " doesn't have a corresponding directory in fixtures/"
converterName + " doesn't have a corresponding directory in fixtures/",
);
}
}


@ -19,7 +19,7 @@ const curlCommandsDir = path.resolve(fixturesDir, "curl_commands");
const testArgs = await yargs(hideBin(process.argv))
.scriptName("test.js")
.usage(
"Usage: $0 [--language <language>] [--test <test_name>] [test_name...]"
"Usage: $0 [--language <language>] [--test <test_name>] [test_name...]",
)
.option("l", {
alias: "language",
@ -59,7 +59,7 @@ const testFileNames =
t
.toString()
.replace(/ /g, "_")
.replace(/(\.sh)?$/, ".sh")
.replace(/(\.sh)?$/, ".sh"),
)
: fs.readdirSync(curlCommandsDir).filter((f) => f.endsWith(".sh")); // if no --test specified, run them all
@ -80,7 +80,7 @@ for (const fileName of testFileNames) {
const filePath = path.resolve(
fixturesDir,
outputLanguage,
fileName.replace(/\.sh$/, converter.extension)
fileName.replace(/\.sh$/, converter.extension),
);
const testName = fileName.replace(/_/g, " ").replace(/\.sh$/, "");
const fullTestName = converter.name + ": " + testName;
@ -95,7 +95,7 @@ for (const fileName of testFileNames) {
actual = converter.converter(inputFileContents);
} catch (e) {
console.error(
"Failed converting " + fileName + " to " + converter.name + ":"
"Failed converting " + fileName + " to " + converter.name + ":",
);
console.error(inputFileContents);
console.error(e);


@ -60,7 +60,7 @@ const executables = {
fs.writeFileSync(
"/tmp/curlconverter/httpie/main",
contents.trimEnd() + " --ignore-stdin" + "\n",
"utf8"
"utf8",
);
},
exec: "chmod +x /tmp/curlconverter/httpie/main && /tmp/curlconverter/httpie/main",
@ -80,7 +80,7 @@ const executables = {
"\n" +
" }\n" +
"}\n",
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/java && javac Main.java && java Main",
@ -122,7 +122,7 @@ const executables = {
"/tmp/curlconverter/java-jsoup/src/main/java/com/mycompany/app/Main.java",
`package com.mycompany.app;\n\n` +
contents.replace("class Main", "public class Main"),
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/java-jsoup && mvn compile && mvn exec:java -Dexec.mainClass=com.mycompany.app.Main",
@ -146,7 +146,7 @@ import jQueryInit from 'jquery';
var $ = jQueryInit(window);
` + contents,
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/javascript-jquery && node main.js",
@ -158,7 +158,7 @@ var $ = jQueryInit(window);
fs.writeFileSync(
"/tmp/curlconverter/javascript-xhr/main.js",
"import { XMLHttpRequest } from 'xmlhttprequest';\n\n" + contents,
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/javascript-xhr && node main.js",
@ -171,7 +171,7 @@ var $ = jQueryInit(window);
fs.writeFileSync(
"/tmp/curlconverter/kotlin/script.main.kts",
'@file:DependsOn("com.squareup.okhttp3:okhttp:4.11.0")\n\n' + contents,
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/kotlin && kotlin script.main.kts",
@ -199,9 +199,9 @@ var $ = jQueryInit(window);
"/tmp/curlconverter/node-http/main.js",
contents.replace(
"hostname: 'localhost:28139',",
"hostname: 'localhost', port: 28139,"
"hostname: 'localhost', port: 28139,",
),
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/node-http && node main.js",
@ -222,23 +222,23 @@ var $ = jQueryInit(window);
"#import <Foundation/Foundation.h>\n" +
"\n" +
"int main(int argc, const char * argv[]) {\n" +
" @autoreleasepool {\n"
" @autoreleasepool {\n",
)
.replace(
"NSURLSession *session = ",
"\ndispatch_semaphore_t semaphore = dispatch_semaphore_create(0);\nNSURLSession *session = "
"\ndispatch_semaphore_t semaphore = dispatch_semaphore_create(0);\nNSURLSession *session = ",
)
.replace(
' NSLog(@"%@", httpResponse);\n' + " }\n",
' NSLog(@"%@", httpResponse);\n' +
" }\n" +
"dispatch_semaphore_signal(semaphore);\n"
"dispatch_semaphore_signal(semaphore);\n",
) +
"dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);\n" +
" }\n" +
" return 0;\n" +
"}\n",
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/objectivec && clang -framework Foundation main.m -o main && ./main",
@ -250,7 +250,7 @@ var $ = jQueryInit(window);
contents
.replace(
"\nlet uri = Uri.of_string ",
"let body =\nlet uri = Uri.of_string "
"let body =\nlet uri = Uri.of_string ",
)
.replace(
" (* Do stuff with the result *)\n",
@ -259,9 +259,9 @@ var $ = jQueryInit(window);
"" +
"let () =\n" +
" let body = Lwt_main.run body in\n" +
" print_endline body\n"
" print_endline body\n",
),
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/ocaml && eval `opam config env` && ocamlbuild -use-ocamlfind -tag thread -pkg cohttp-lwt-unix main.native && ./main.native",
@ -305,11 +305,11 @@ var $ = jQueryInit(window);
contents
.replace(
"import Foundation\n",
"import Foundation\n\n// testing\nlet group = DispatchGroup()\ngroup.enter()\n"
"import Foundation\n\n// testing\nlet group = DispatchGroup()\ngroup.enter()\n",
)
.replace(
'print(str ?? "")\n }\n',
'print(str ?? "")\n }\n\n // testing\n group.leave()\n'
'print(str ?? "")\n }\n\n // testing\n group.leave()\n',
) +
`
// testing
@ -318,7 +318,7 @@ group.notify(queue: .main) {
}
dispatchMain()\n`,
"utf8"
"utf8",
);
},
exec: "cd /tmp/curlconverter/swift && swift main.swift",
@ -361,7 +361,7 @@ const languages: (keyof typeof executables)[] = Array.isArray(argv.language)
const testFile = async (
testFilename: string,
languages: (keyof typeof executables)[]
languages: (keyof typeof executables)[],
): Promise<void> => {
const rawRequests: string[] = [];
@ -409,7 +409,7 @@ const testFile = async (
const inputFile = path.join(
fixturesDir,
"curl_commands",
testFilename + ".sh"
testFilename + ".sh",
);
if (!fs.existsSync(inputFile)) {
server.close();
@ -433,7 +433,7 @@ const testFile = async (
} catch (e) {}
const files = languages.map((l: keyof typeof executables) =>
path.join(fixturesDir, l, testFilename + converters[l].extension)
path.join(fixturesDir, l, testFilename + converters[l].extension),
);
for (let i = 0; i < languages.length; i++) {
const language = languages[i];
@ -458,7 +458,7 @@ const testFile = async (
}
} else {
console.error(
language + " file doesn't exist, skipping: " + languageFile
language + " file doesn't exist, skipping: " + languageFile,
);
}
}
@ -521,8 +521,8 @@ if (!tests.length) {
path.join(
fixturesDir,
l,
testFile.replace(".sh", converters[l].extension)
)
testFile.replace(".sh", converters[l].extension),
),
)
) {
tests.push(testFile);