Chore: Mobile: Update fsDriver in preparation for mobile plugins (#10066)

Repository: https://github.com/laurent22/joplin.git (mirror)
Parent commit: 20f8bb76f7
This commit: 9d17ab429d
@@ -582,8 +582,16 @@ packages/app-mobile/utils/autodetectTheme.js
packages/app-mobile/utils/checkPermissions.js
packages/app-mobile/utils/createRootStyle.js
packages/app-mobile/utils/debounce.js
packages/app-mobile/utils/fs-driver/constants.js
packages/app-mobile/utils/fs-driver/fs-driver-rn.js
packages/app-mobile/utils/fs-driver/runOnDeviceTests.js
packages/app-mobile/utils/fs-driver/tarCreate.js
packages/app-mobile/utils/fs-driver/tarExtract.test.js
packages/app-mobile/utils/fs-driver/tarExtract.js
packages/app-mobile/utils/fs-driver/testUtil/createFilesFromPathRecord.js
packages/app-mobile/utils/fs-driver/testUtil/verifyDirectoryMatches.js
packages/app-mobile/utils/polyfills/bufferPolyfill.js
packages/app-mobile/utils/polyfills/index.js
packages/app-mobile/utils/setupNotifications.js
packages/app-mobile/utils/shareHandler.js
packages/app-mobile/utils/types.js
.gitignore (vendored, 8 changed lines)
@@ -562,8 +562,16 @@ packages/app-mobile/utils/autodetectTheme.js
packages/app-mobile/utils/checkPermissions.js
packages/app-mobile/utils/createRootStyle.js
packages/app-mobile/utils/debounce.js
packages/app-mobile/utils/fs-driver/constants.js
packages/app-mobile/utils/fs-driver/fs-driver-rn.js
packages/app-mobile/utils/fs-driver/runOnDeviceTests.js
packages/app-mobile/utils/fs-driver/tarCreate.js
packages/app-mobile/utils/fs-driver/tarExtract.test.js
packages/app-mobile/utils/fs-driver/tarExtract.js
packages/app-mobile/utils/fs-driver/testUtil/createFilesFromPathRecord.js
packages/app-mobile/utils/fs-driver/testUtil/verifyDirectoryMatches.js
packages/app-mobile/utils/polyfills/bufferPolyfill.js
packages/app-mobile/utils/polyfills/index.js
packages/app-mobile/utils/setupNotifications.js
packages/app-mobile/utils/shareHandler.js
packages/app-mobile/utils/types.js
@@ -6,9 +6,7 @@
// So there's basically still a one way flux: React => SQLite => Redux => React

-// For aws-sdk-js-v3
-import 'react-native-get-random-values';
-import 'react-native-url-polyfill/auto';
+import './utils/polyfills';

import { LogBox, AppRegistry } from 'react-native';
const Root = require('./root').default;
packages/app-mobile/utils/fs-driver/constants.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
// Maximum/expected size of part of a file to be read
// eslint-disable-next-line import/prefer-default-export
export const chunkSize = 1024 * 100; // 100 KiB
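For context, a minimal sketch (not part of the commit) of how a chunked read built around this constant can look; it reuses the fsDriver open/readFileChunk/close API that appears later in this commit, and the countBytes helper name is hypothetical:

import shim from '@joplin/lib/shim';
import { Buffer } from 'buffer';
import { chunkSize } from './constants';

// Reads a file chunkSize bytes at a time and returns its total length in bytes.
const countBytes = async (path: string): Promise<number> => {
	const fsDriver = shim.fsDriver();
	const handle = await fsDriver.open(path, 'r');
	let total = 0;
	try {
		let base64 = await fsDriver.readFileChunk(handle, chunkSize, 'base64');
		while (base64) {
			total += Buffer.from(base64, 'base64').length;
			base64 = await fsDriver.readFileChunk(handle, chunkSize, 'base64');
		}
	} finally {
		await fsDriver.close(handle);
	}
	return total;
};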
packages/app-mobile/utils/fs-driver/fs-driver-rn.ts

@@ -3,13 +3,12 @@ const RNFetchBlob = require('rn-fetch-blob').default;
import * as RNFS from 'react-native-fs';
import RNSAF, { DocumentFileDetail, openDocumentTree } from '@joplin/react-native-saf-x';
import { Platform } from 'react-native';
import * as tar from 'tar-stream';
import { resolve } from 'path';
import { Buffer } from 'buffer';
import Logger from '@joplin/utils/Logger';
import tarCreate from './tarCreate';
import tarExtract from './tarExtract';
import JoplinError from '@joplin/lib/JoplinError';
const md5 = require('md5');
import { resolve } from 'path';

const logger = Logger.create('fs-driver-rn');

const ANDROID_URI_PREFIX = 'content://';
@@ -51,7 +50,7 @@ const normalizeEncoding = (encoding: string): SupportedEncoding => {

export default class FsDriverRN extends FsDriverBase {
	public appendFileSync() {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: appendFileSync');
	}

	// Requires that the file already exists.
@@ -212,7 +211,7 @@ export default class FsDriverRN extends FsDriverBase {
		// return RNFS.touch(path, timestampDate, timestampDate);
	}

-	public async open(path: string, mode: number) {
+	public async open(path: string, mode: string) {
		if (isScopedUri(path)) {
			throw new Error('open() not implemented in FsDriverAndroid');
		}
@@ -228,7 +227,7 @@ export default class FsDriverRN extends FsDriverBase {
		};
	}

-	public close(): Promise<void> {
+	public close(_handle: any): Promise<void> {
		// Nothing
		return null;
	}
@@ -302,58 +301,31 @@ export default class FsDriverRN extends FsDriverBase {
	}

	public resolve(...paths: string[]): string {
-		throw new Error(`Not implemented: resolve(): ${JSON.stringify(paths)}`);
+		return resolve(...paths);
	}

	public async md5File(path: string): Promise<string> {
-		throw new Error(`Not implemented: md5File(): ${path}`);
+		if (isScopedUri(path)) {
+			// Warning: Slow
+			const fileData = Buffer.from(await this.readFile(path, 'base64'), 'base64');
+			return md5(fileData);
+		} else {
+			return await RNFS.hash(path, 'md5');
+		}
	}

-	public async tarExtract(_options: any) {
-		throw new Error('Not implemented: tarExtract');
+	public async tarExtract(options: any) {
+		await tarExtract({
+			cwd: RNFS.DocumentDirectoryPath,
+			...options,
+		});
	}

	public async tarCreate(options: any, filePaths: string[]) {
-		// Choose a default cwd if not given
-		const cwd = options.cwd ?? RNFS.DocumentDirectoryPath;
-		const file = resolve(cwd, options.file);
-
-		if (await this.exists(file)) {
-			throw new Error('Error! Destination already exists');
-		}
-
-		const pack = tar.pack();
-
-		for (const path of filePaths) {
-			const absPath = resolve(cwd, path);
-			const stat = await this.stat(absPath);
-			const sizeBytes: number = stat.size;
-
-			const entry = pack.entry({ name: path, size: sizeBytes }, (error) => {
-				if (error) {
-					logger.error(`Tar error: ${error}`);
-				}
-			});
-
-			const chunkSize = 1024 * 100; // 100 KiB
-			for (let offset = 0; offset < sizeBytes; offset += chunkSize) {
-				// The RNFS documentation suggests using base64 for binary files.
-				const part = await RNFS.read(absPath, chunkSize, offset, 'base64');
-				entry.write(Buffer.from(part, 'base64'));
-			}
-			entry.end();
-		}
-
-		pack.finalize();
-
-		// The streams used by tar-stream seem not to support a chunk size
-		// (it seems despite the typings provided).
-		let data: number[]|null = null;
-		while ((data = pack.read()) !== null) {
-			const buff = Buffer.from(data);
-			const base64Data = buff.toString('base64');
-			await this.appendFile(file, base64Data, 'base64');
-		}
+		await tarCreate({
+			cwd: RNFS.DocumentDirectoryPath,
+			...options,
+		}, filePaths);
	}

	public async getExternalDirectoryPath(): Promise<string | undefined> {
packages/app-mobile/utils/fs-driver/runOnDeviceTests.ts

@@ -5,6 +5,8 @@ import { join } from 'path';
import FsDriverBase from '@joplin/lib/fs-driver-base';
import Logger from '@joplin/utils/Logger';
import { Buffer } from 'buffer';
+import createFilesFromPathRecord from './testUtil/createFilesFromPathRecord';
+import verifyDirectoryMatches from './testUtil/verifyDirectoryMatches';

const logger = Logger.create('fs-driver-tests');
@@ -181,7 +183,7 @@ const testReadFileChunkUtf8 = async (tempDir: string) => {
	await expectToBe(readData, undefined);
};

-const testTarCreate = async (tempDir: string) => {
+const testTarCreateAndExtract = async (tempDir: string) => {
	logger.info('Testing fsDriver.tarCreate...');

	const directoryToPack = join(tempDir, uuid.createNano());
@@ -193,34 +195,31 @@

	// small utf-8 encoded files
	for (let i = 0; i < 10; i ++) {
-		const testFilePath = join(directoryToPack, uuid.createNano());
-
+		const testFileName = uuid.createNano();
		const fileContent = `✅ Testing... ä ✅ File #${i}`;
-		await fsDriver.writeFile(testFilePath, fileContent, 'utf-8');
-
-		fileContents[testFilePath] = fileContent;
+		fileContents[testFileName] = fileContent;
	}

	// larger utf-8 encoded files
	for (let i = 0; i < 3; i ++) {
-		const testFilePath = join(directoryToPack, uuid.createNano());
+		const testFileName = uuid.createNano();

		let fileContent = `✅ Testing... ä ✅ File #${i}`;

		for (let j = 0; j < 8; j ++) {
			fileContent += fileContent;
		}

-		await fsDriver.writeFile(testFilePath, fileContent, 'utf-8');
-
-		fileContents[testFilePath] = fileContent;
+		fileContents[testFileName] = fileContent;
	}

+	await createFilesFromPathRecord(directoryToPack, fileContents);
+
	// Pack the files
	const pathsToTar = Object.keys(fileContents);
	const tarOutputPath = join(tempDir, 'test-tar.tar');
	await fsDriver.tarCreate({
-		cwd: tempDir,
+		cwd: directoryToPack,
		file: tarOutputPath,
	}, pathsToTar);
@@ -231,6 +230,27 @@
	for (const fileContent of Object.values(fileContents)) {
		await expectToBe(rawTarData.includes(fileContent), true);
	}
+
+	logger.info('Testing fsDriver.tarExtract...');
+
+	const outputDirectory = join(tempDir, uuid.createNano());
+	await fsDriver.mkdir(outputDirectory);
+	await fsDriver.tarExtract({
+		cwd: outputDirectory,
+		file: tarOutputPath,
+	});
+
+	await verifyDirectoryMatches(outputDirectory, fileContents);
};

+const testMd5File = async (tempDir: string) => {
+	logger.info('Testing fsDriver.md5file...');
+	const fsDriver = shim.fsDriver();
+
+	const testFilePath = join(tempDir, `test-md5-${uuid.createNano()}`);
+	await fsDriver.writeFile(testFilePath, '🚧test', 'utf8');
+
+	await expectToBe(await fsDriver.md5File(testFilePath), 'ba11ba1be5042133a71874731e3d42cd');
+};

// In the past, some fs-driver functionality has worked correctly on some devices and not others.
@@ -247,7 +267,10 @@ const runOnDeviceTests = async () => {
		await testAppendFile(tempDir);
		await testReadWriteFileUtf8(tempDir);
		await testReadFileChunkUtf8(tempDir);
-		await testTarCreate(tempDir);
+		await testTarCreateAndExtract(tempDir);
+		await testMd5File(tempDir);

		logger.info('Done');
	} catch (error) {
		const errorMessage = `On-device testing failed with an exception: ${error}.`;
packages/app-mobile/utils/fs-driver/tarCreate.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
import { pack as tarStreamPack } from 'tar-stream';
import { resolve } from 'path';
import * as RNFS from 'react-native-fs';

import Logger from '@joplin/utils/Logger';
import { chunkSize } from './constants';
import shim from '@joplin/lib/shim';

const logger = Logger.create('fs-driver-rn');

interface TarCreateOptions {
	cwd: string;
	file: string;
}

// TODO: Support glob patterns, which are currently supported by the
// node fsDriver.

const tarCreate = async (options: TarCreateOptions, filePaths: string[]) => {
	// Choose a default cwd if not given
	const cwd = options.cwd ?? RNFS.DocumentDirectoryPath;
	const file = resolve(cwd, options.file);

	const fsDriver = shim.fsDriver();
	if (await fsDriver.exists(file)) {
		throw new Error('Error! Destination already exists');
	}

	const pack = tarStreamPack();

	for (const path of filePaths) {
		const absPath = resolve(cwd, path);
		const stat = await fsDriver.stat(absPath);
		const sizeBytes: number = stat.size;

		const entry = pack.entry({ name: path, size: sizeBytes }, (error) => {
			if (error) {
				logger.error(`Tar error: ${error}`);
			}
		});

		for (let offset = 0; offset < sizeBytes; offset += chunkSize) {
			// The RNFS documentation suggests using base64 for binary files.
			const part = await RNFS.read(absPath, chunkSize, offset, 'base64');
			entry.write(Buffer.from(part, 'base64'));
		}
		entry.end();
	}

	pack.finalize();

	// The streams used by tar-stream seem not to support a chunk size
	// (it seems despite the typings provided).
	let data: number[]|null = null;
	while ((data = pack.read()) !== null) {
		const buff = Buffer.from(data);
		const base64Data = buff.toString('base64');
		await fsDriver.appendFile(file, base64Data, 'base64');
	}
};

export default tarCreate;
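For context, a minimal usage sketch (not part of the commit); the note paths and archive name below are hypothetical:

import * as RNFS from 'react-native-fs';
import tarCreate from './tarCreate';

const packNotes = async () => {
	// Packs two files (resolved relative to cwd) into backup.tar in the app's document directory.
	await tarCreate(
		{ cwd: RNFS.DocumentDirectoryPath, file: 'backup.tar' },
		['notes/a.md', 'notes/b.md'],
	);
};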
packages/app-mobile/utils/fs-driver/tarExtract.test.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
// tarExtract has tests both in runOnDeviceTests and here.
// Just Jest tests aren't sufficient in this case because, in the past, differences
// between polyfilled and node-built-in libraries have caused issues.

import shim from '@joplin/lib/shim';
import { createTempDir } from '@joplin/lib/testing/test-utils';
import { join } from 'path';
import createFilesFromPathRecord from './testUtil/createFilesFromPathRecord';
import verifyDirectoryMatches from './testUtil/verifyDirectoryMatches';
import tarExtract from './tarExtract';
import { remove } from 'fs-extra';


const verifyTarWithContentExtractsTo = async (filePaths: Record<string, string>) => {
	const tempDir = await createTempDir();

	try {
		const sourceDirectory = join(tempDir, 'source');
		await createFilesFromPathRecord(sourceDirectory, filePaths);

		const tarOutputFile = join(tempDir, 'test.tar');
		// Uses node tar during testing
		await shim.fsDriver().tarCreate(
			{ cwd: sourceDirectory, file: tarOutputFile }, Object.keys(filePaths),
		);

		const outputDirectory = join(tempDir, 'dest');
		await tarExtract({
			cwd: outputDirectory,
			file: tarOutputFile,
		});

		await verifyDirectoryMatches(outputDirectory, filePaths);
	} finally {
		await remove(tempDir);
	}
};

describe('tarExtract', () => {
	it('should extract a tar with a single file', async () => {
		await verifyTarWithContentExtractsTo({
			'a.txt': 'Test',
		});
	});

	it('should extract tar files containing unicode characters', async () => {
		await verifyTarWithContentExtractsTo({
			'a.txt': 'Test✅',
			'b/á-test.txt': 'Test letters: ϑ, ó, ö, ś',
			'c/á-test.txt': 'This also works.',
		});
	});

	it('should extract tar files with deeply nested subdirectories', async () => {
		await verifyTarWithContentExtractsTo({
			'a.txt': 'Test✅',
			'b/c/d/e/f/test-Ó.txt': 'Test letters: ϑ, ó, ö, ś',
			'b/c/d/e/f/test2.txt': 'This works.',
			'b/test3.txt': 'This also works.',
			'b/test4.txt': 'This also works...',
			'b/c/test4.txt': 'This also works.',
		});
	});
});
packages/app-mobile/utils/fs-driver/tarExtract.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { extract as tarStreamExtract } from 'tar-stream';
import { resolve, dirname } from 'path';
import shim from '@joplin/lib/shim';
import { chunkSize } from './constants';

interface TarExtractOptions {
	cwd: string;
	file: string;
}

const tarExtract = async (options: TarExtractOptions) => {
	const cwd = options.cwd;

	// resolve doesn't correctly handle file:// or content:// URLs. Thus, we don't resolve relative
	// to cwd if the source is a URL.
	const isSourceUrl = options.file.match(/$[a-z]+:\/\//);
	const filePath = isSourceUrl ? options.file : resolve(cwd, options.file);

	const fsDriver = shim.fsDriver();
	if (!(await fsDriver.exists(filePath))) {
		throw new Error('tarExtract: Source file does not exist');
	}

	const extract = tarStreamExtract({ defaultEncoding: 'base64' });

	extract.on('entry', async (header, stream, next) => {
		const outPath = fsDriver.resolveRelativePathWithinDir(cwd, header.name);

		if (await fsDriver.exists(outPath)) {
			throw new Error(`Extracting ${outPath} would overwrite`);
		}

		// Move to the next item when all available data has been read.
		stream.once('end', () => next());

		if (header.type === 'directory') {
			await fsDriver.mkdir(outPath);
		} else if (header.type === 'file') {
			const parentDir = dirname(outPath);
			await fsDriver.mkdir(parentDir);

			await fsDriver.appendBinaryReadableToFile(outPath, stream);
		} else {
			throw new Error(`Unsupported file system entity type: ${header.type}`);
		}

		// Drain the rest of the stream.
		stream.resume();
	});

	let finished = false;
	const finishPromise = new Promise<void>((resolve, reject) => {
		extract.once('finish', () => {
			finished = true;
			resolve();
		});

		extract.once('error', (error) => {
			reject(error);
		});
	});

	const fileHandle = await fsDriver.open(filePath, 'r');
	const readChunk = async () => {
		const base64 = await fsDriver.readFileChunk(fileHandle, chunkSize, 'base64');
		return base64 && Buffer.from(base64, 'base64');
	};

	try {
		let chunk = await readChunk();
		let nextChunk = await readChunk();
		do {
			extract.write(chunk);

			chunk = nextChunk;
			nextChunk = await readChunk();
		} while (nextChunk !== null && !finished);

		if (chunk !== null) {
			extract.end(chunk);
		} else {
			extract.end();
		}
	} finally {
		await fsDriver.close(fileHandle);
	}

	await finishPromise;
};

export default tarExtract;
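For context, a minimal usage sketch (not part of the commit); the archive name and target directory are hypothetical:

import * as RNFS from 'react-native-fs';
import shim from '@joplin/lib/shim';
import tarExtract from './tarExtract';

const restoreNotes = async () => {
	// Unpacks backup.tar into a fresh directory under the app's document directory.
	const outputDir = `${RNFS.DocumentDirectoryPath}/restored-notes`;
	await shim.fsDriver().mkdir(outputDir);
	await tarExtract({ cwd: outputDir, file: `${RNFS.DocumentDirectoryPath}/backup.tar` });
};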
packages/app-mobile/utils/fs-driver/testUtil/createFilesFromPathRecord.ts (new file, 13 lines)

@@ -0,0 +1,13 @@
import shim from '@joplin/lib/shim';
import { join, dirname } from 'path';

const createFilesFromPathRecord = async (baseDir: string, fileContents: Record<string, string>) => {
	for (const relativePath in fileContents) {
		const targetPath = join(baseDir, relativePath);
		await shim.fsDriver().mkdir(dirname(targetPath));
		await shim.fsDriver().writeFile(targetPath, fileContents[relativePath], 'utf-8');
	}
};

export default createFilesFromPathRecord;
packages/app-mobile/utils/fs-driver/testUtil/verifyDirectoryMatches.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import shim from '@joplin/lib/shim';
import { join } from 'path';

const verifyDirectoryMatches = async (baseDir: string, fileContents: Record<string, string>) => {
	for (const path in fileContents) {
		const fileContent = await shim.fsDriver().readFile(join(baseDir, path), 'utf8');
		const expectedContent = fileContents[path];
		if (fileContent !== expectedContent) {
			throw new Error(`File ${path} content mismatch. Was ${JSON.stringify(fileContent)}, expected ${JSON.stringify(expectedContent)}.`);
		}
	}

	const dirStats = await shim.fsDriver().readDirStats(baseDir, { recursive: true });
	for (const stat of dirStats) {
		if (!stat.isDirectory() && !(stat.path in fileContents)) {
			throw new Error(`Unexpected file with path ${stat.path} found.`);
		}
	}
};

export default verifyDirectoryMatches;
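For context, a small sketch (not part of the commit) showing how these two helpers pair up in a round-trip check; the directory and file names are hypothetical:

import { join } from 'path';
import createFilesFromPathRecord from './createFilesFromPathRecord';
import verifyDirectoryMatches from './verifyDirectoryMatches';

const roundTripCheck = async (tempDir: string) => {
	const contents = { 'a.txt': 'First file', 'sub/b.txt': 'Second file' };
	const dir = join(tempDir, 'fixture');
	// Writes a.txt and sub/b.txt, creating sub/ as needed...
	await createFilesFromPathRecord(dir, contents);
	// ...then throws if any file is missing, differs, or is unexpected.
	await verifyDirectoryMatches(dir, contents);
};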
packages/app-mobile/utils/polyfills/bufferPolyfill.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import { Buffer } from 'buffer';

// Fix the subarray method.
// TODO: Remove this after https://github.com/feross/buffer/issues/329 is closed
const originalSubarray = Buffer.prototype.subarray;
Buffer.prototype.subarray = function(start: number, end: number) {
	const subarray = originalSubarray.call(this, start, end);
	Object.setPrototypeOf(subarray, Buffer.prototype);
	return subarray;
};

// TODO: Remove this "disable-next-line" after eslint supports globalThis.
// eslint-disable-next-line no-undef
globalThis.Buffer = Buffer;
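For context, a small sketch (not part of the commit) of the behaviour the patch restores: without it, the buffer package's subarray() can return a value whose prototype is not Buffer, so Buffer-only methods such as toString('base64') may be missing on the result.

import './bufferPolyfill';
import { Buffer } from 'buffer';

const sub = Buffer.from('hello, world').subarray(0, 5);
// With the patched prototype this logs: true 'aGVsbG8='
console.log(Buffer.isBuffer(sub), sub.toString('base64'));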
packages/app-mobile/utils/polyfills/index.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import './bufferPolyfill';

// For aws-sdk-js-v3
import 'react-native-get-random-values';
import 'react-native-url-polyfill/auto';
packages/lib/fs-driver-base.ts

@@ -2,6 +2,7 @@ import time from './time';
import Setting from './models/Setting';
import { filename, fileExtension } from './path-utils';
const md5 = require('md5');
+import { Buffer } from 'buffer';

export interface Stat {
	birthtime: Date;
@@ -18,35 +19,36 @@ export interface ReadDirStatsOptions {
export default class FsDriverBase {

	public async stat(_path: string): Promise<Stat> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: stat()');
	}

	public async readFile(_path: string, _encoding = 'utf8'): Promise<any> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: readFile');
	}

	public async appendFile(_path: string, _content: string, _encoding = 'base64'): Promise<any> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: appendFile');
	}

	public async copy(_source: string, _dest: string) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: copy');
	}

	public async chmod(_source: string, _mode: string | number) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: chmod');
	}

	// Must also create parent directories
	public async mkdir(_path: string) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: mkdir');
	}

	public async unlink(_path: string) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: unlink');
	}

	public async move(_source: string, _dest: string) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: move');
	}

	public async rename(source: string, dest: string) {
@@ -54,27 +56,27 @@ export default class FsDriverBase {
	}

	public async readFileChunk(_handle: any, _length: number, _encoding = 'base64'): Promise<string> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: readFileChunk');
	}

	public async open(_path: string, _mode: any): Promise<any> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: open');
	}

	public async close(_handle: any): Promise<any> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: close');
	}

	public async readDirStats(_path: string, _options: ReadDirStatsOptions = null): Promise<Stat[]> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: readDirStats');
	}

	public async exists(_path: string): Promise<boolean> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: exists');
	}

	public async remove(_path: string): Promise<void> {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: remove');
	}

	public async isDirectory(path: string) {
@@ -94,8 +96,14 @@ export default class FsDriverBase {
		throw new Error('Not implemented: resolve');
	}

-	public resolveRelativePathWithinDir(_baseDir: string, relativePath: string): string {
-		throw new Error(`Not implemented: resolveRelativePathWithinDir(): ${relativePath}`);
+	// Resolves the provided relative path to an absolute path within baseDir. The function
+	// also checks that the absolute path is within baseDir, to avoid security issues.
+	// It is expected that baseDir is a safe path (not user-provided).
+	public resolveRelativePathWithinDir(baseDir: string, relativePath: string) {
+		const resolvedBaseDir = this.resolve(baseDir);
+		const resolvedPath = this.resolve(baseDir, relativePath);
+		if (resolvedPath.indexOf(resolvedBaseDir) !== 0) throw new Error(`Resolved path for relative path "${relativePath}" is not within base directory "${baseDir}" (Was resolved to ${resolvedPath})`);
+		return resolvedPath;
	}

	public getExternalDirectoryPath(): Promise<string | undefined> {
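For context, a small sketch (not part of the commit) of the containment check in action; the directory names are hypothetical:

import shim from '@joplin/lib/shim';

const fsDriver = shim.fsDriver();
// Resolves to /data/plugins/assets/icon.png (stays inside the base directory).
fsDriver.resolveRelativePathWithinDir('/data/plugins', 'assets/icon.png');
// Throws: '../secrets.txt' resolves to /data/secrets.txt, outside the base directory.
fsDriver.resolveRelativePathWithinDir('/data/plugins', '../secrets.txt');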
@@ -106,6 +114,15 @@ export default class FsDriverBase {
		return false;
	}

+	public async appendBinaryReadableToFile(path: string, readable: { read(): number[]|null }) {
+		let data: number[]|null = null;
+		while ((data = readable.read()) !== null) {
+			const buff = Buffer.from(data);
+			const base64Data = buff.toString('base64');
+			await this.appendFile(path, base64Data, 'base64');
+		}
+	}
+
	protected async readDirStatsHandleRecursion_(basePath: string, stat: Stat, output: Stat[], options: ReadDirStatsOptions): Promise<Stat[]> {
		if (options.recursive && stat.isDirectory()) {
			const subPath = `${basePath}/${stat.path}`;
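For context, a small sketch (not part of the commit) of how the new helper can consume a tar-stream pack, whose read() yields data until the stream is drained; the output path and entry name are hypothetical:

import { pack as tarStreamPack } from 'tar-stream';
import shim from '@joplin/lib/shim';

const writeSingleEntryTar = async (outputPath: string) => {
	const pack = tarStreamPack();
	pack.entry({ name: 'hello.txt' }, 'Hello, world!');
	pack.finalize();
	// Appends each chunk returned by pack.read() to outputPath as base64 data.
	await shim.fsDriver().appendBinaryReadableToFile(outputPath, pack);
};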
@@ -187,11 +204,11 @@ export default class FsDriverBase {
	}

	public async tarExtract(_options: any) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: tarExtract');
	}

	public async tarCreate(_options: any, _filePaths: string[]) {
-		throw new Error('Not implemented');
+		throw new Error('Not implemented: tarCreate');
	}

}
packages/lib/fs-driver-node.ts

@@ -1,4 +1,3 @@
-import { resolve as nodeResolve } from 'path';
import FsDriverBase, { Stat } from './fs-driver-base';
import time from './time';
const md5File = require('md5-file');
@@ -84,7 +83,7 @@ export default class FsDriverNode extends FsDriverBase {
		return r;
	}

-	public async stat(path: string) {
+	public async stat(path: string): Promise<Stat> {
		try {
			const stat = await fs.stat(path);
			return {
@@ -186,18 +185,8 @@ export default class FsDriverNode extends FsDriverBase {
		throw new Error(`Unsupported encoding: ${encoding}`);
	}

-	public resolve(path: string) {
-		return require('path').resolve(path);
-	}
-
-	// Resolves the provided relative path to an absolute path within baseDir. The function
-	// also checks that the absolute path is within baseDir, to avoid security issues.
-	// It is expected that baseDir is a safe path (not user-provided).
-	public resolveRelativePathWithinDir(baseDir: string, relativePath: string) {
-		const resolvedBaseDir = nodeResolve(baseDir);
-		const resolvedPath = nodeResolve(baseDir, relativePath);
-		if (resolvedPath.indexOf(resolvedBaseDir) !== 0) throw new Error(`Resolved path for relative path "${relativePath}" is not within base directory "${baseDir}" (Was resolved to ${resolvedPath})`);
-		return resolvedPath;
+	public resolve(...pathComponents: string[]) {
+		return require('path').resolve(...pathComponents);
	}

	public async md5File(path: string): Promise<string> {