Diffstat (limited to 'pw_tokenizer/ts/detokenizer.ts')
-rw-r--r--  pw_tokenizer/ts/detokenizer.ts  39
1 file changed, 19 insertions, 20 deletions
diff --git a/pw_tokenizer/ts/detokenizer.ts b/pw_tokenizer/ts/detokenizer.ts
index fe6ea910a..8137d3471 100644
--- a/pw_tokenizer/ts/detokenizer.ts
+++ b/pw_tokenizer/ts/detokenizer.ts
@@ -13,10 +13,10 @@
// the License.
/** Decodes and detokenizes strings from binary or Base64 input. */
-import {Buffer} from 'buffer';
-import {Frame} from 'pigweedjs/pw_hdlc';
-import {TokenDatabase} from './token_database';
-import {PrintfDecoder} from './printf_decoder';
+import { Buffer } from 'buffer';
+import { Frame } from 'pigweedjs/pw_hdlc';
+import { TokenDatabase } from './token_database';
+import { PrintfDecoder } from './printf_decoder';
const MAX_RECURSIONS = 9;
const BASE64CHARS = '[A-Za-z0-9+/-_]';
@@ -27,7 +27,7 @@ const PATTERN = new RegExp(
`(?:${BASE64CHARS}{4})*` +
// The last block of 4 chars may have one or two padding chars (=).
`(?:${BASE64CHARS}{3}=|${BASE64CHARS}{2}==)?`,
- 'g'
+ 'g',
);
interface TokenAndArgs {
@@ -61,7 +61,7 @@ export class Detokenizer {
* returned as string as-is.
*/
detokenizeUint8Array(data: Uint8Array): string {
- const {token, args} = this.decodeUint8Array(data);
+ const { token, args } = this.decodeUint8Array(data);
// Parse arguments if this is printf-style text.
const format = this.database.get(token);
if (format) {
@@ -80,7 +80,7 @@ export class Detokenizer {
*/
detokenizeBase64(
tokenizedFrame: Frame,
- maxRecursion: number = MAX_RECURSIONS
+ maxRecursion: number = MAX_RECURSIONS,
): string {
const base64String = new TextDecoder().decode(tokenizedFrame.data);
return this.detokenizeBase64String(base64String, maxRecursion);
@@ -88,16 +88,16 @@ export class Detokenizer {
private detokenizeBase64String(
base64String: string,
- recursions: number
+ recursions: number,
): string {
- return base64String.replace(PATTERN, base64Substring => {
- const {token, args} = this.decodeBase64TokenFrame(base64Substring);
+ return base64String.replace(PATTERN, (base64Substring) => {
+ const { token, args } = this.decodeBase64TokenFrame(base64Substring);
const format = this.database.get(token);
// Parse arguments if this is printf-style text.
if (format) {
const decodedOriginal = new PrintfDecoder().decode(
String(format),
- args
+ args,
);
// Detokenize nested Base64 tokens and their arguments.
if (recursions > 0) {
@@ -110,14 +110,13 @@ export class Detokenizer {
}
private decodeUint8Array(data: Uint8Array): TokenAndArgs {
- const token = new DataView(
- data.buffer,
- data.byteOffset,
- 4
- ).getUint32(0, true);
+ const token = new DataView(data.buffer, data.byteOffset, 4).getUint32(
+ 0,
+ true,
+ );
const args = new Uint8Array(data.buffer.slice(data.byteOffset + 4));
- return {token, args};
+ return { token, args };
}
private decodeBase64TokenFrame(base64Data: string): TokenAndArgs {
@@ -125,15 +124,15 @@ export class Detokenizer {
const prefixRemoved = base64Data.slice(1);
const noBase64 = Buffer.from(prefixRemoved, 'base64').toString('binary');
// Convert back to bytes and return token and arguments.
- const bytes = noBase64.split('').map(ch => ch.charCodeAt(0));
+ const bytes = noBase64.split('').map((ch) => ch.charCodeAt(0));
const uIntArray = new Uint8Array(bytes);
const token = new DataView(
uIntArray.buffer,
uIntArray.byteOffset,
- 4
+ 4,
).getUint32(0, true);
const args = new Uint8Array(bytes.slice(4));
- return {token, args};
+ return { token, args };
}
}
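
Note for readers skimming this change: the restructured decodeUint8Array keeps the same behavior, reading the first four bytes of the payload as a little-endian uint32 token and treating the remaining bytes as printf-style argument data. A minimal standalone sketch of that split is below; the helper name splitTokenAndArgs and the sample payload are illustrative only and are not part of this change.

function splitTokenAndArgs(data: Uint8Array): { token: number; args: Uint8Array } {
  // The token is the first 4 bytes of the payload, read as a little-endian uint32.
  const token = new DataView(data.buffer, data.byteOffset, 4).getUint32(0, true);
  // Everything after the token is the encoded printf-style argument data.
  const args = new Uint8Array(data.buffer.slice(data.byteOffset + 4));
  return { token, args };
}

// Hypothetical payload: token 0x0d6e5131 followed by two argument bytes.
const example = new Uint8Array([0x31, 0x51, 0x6e, 0x0d, 0x02, 0x04]);
const { token, args } = splitTokenAndArgs(example);
console.log(token.toString(16), args); // "d6e5131" Uint8Array(2) [2, 4]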