Example #1
TSON.finalize = async (items?: any[]) => {
  // Concatenate all queued blobs (blobsToAwait) and read the combined data in one go.
  const allChunks = await readBlobAsync(new Blob(blobsToAwait), 'binary');
  if (items) {
    for (const item of items) {
      // Manually go through all "blob" types in the result
      // and lookup the data slice they point at.
      if (item.$types) {
        let types = item.$types;
        // typeson may nest the key path map under a "$" property; if so,
        // unwrap it and prefix each key path with "$." when looking up values.
        const arrayType = types.$;
        if (arrayType) types = types.$;
        for (const keyPath in types) {
          const typeName = types[keyPath];
          const typeSpec = TSON.types[typeName];
          if (typeSpec && typeSpec.finalize) {
            // The encapsulated value holds {start, end} offsets into the combined blob data.
            const b = Dexie.getByKeyPath(item, arrayType ? "$." + keyPath : keyPath);
            typeSpec.finalize(b, allChunks.slice(b.start, b.end));
          }
        }
      }
    }
  }
  // Free up memory
  blobsToAwait = [];
}
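
For context, TSON.finalize is meant to run after TSON.encapsulate has queued blob contents for asynchronous extraction. A minimal usage sketch of the call pattern, mirroring Example #2 below (the rows variable is illustrative):

// Encapsulate plain row objects first; values that need async extraction
// (blobs) get queued until finalize() resolves them.
const tsonValues = rows.map(value => TSON.encapsulate(value));
if (TSON.mustFinalize()) {
  // Dexie.waitFor keeps an ongoing transaction alive while the
  // non-IndexedDB blob read inside TSON.finalize completes.
  await Dexie.waitFor(TSON.finalize(tsonValues));
}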
Example #2
  async function exportAll() {
    // Note: db, tables, slices, progress, options, prettyJson, progressCallback etc.
    // are captured from the enclosing export routine's scope.
    // Count rows:
    const tablesRowCounts = await Promise.all(db.tables.map(table => table.count()));
    tablesRowCounts.forEach((rowCount, i) => tables[i].rowCount = rowCount);
    progress.totalRows = tablesRowCounts.reduce((p, c) => p + c, 0);

    // Write first JSON slice
    const emptyExportJson = JSON.stringify(emptyExport, undefined, prettyJson ? 2 : undefined);
    const posEndDataArray = emptyExportJson.lastIndexOf(']');
    const firstJsonSlice = emptyExportJson.substring(0, posEndDataArray);
    slices.push(firstJsonSlice);

    const filter = options!.filter;

    for (const {name: tableName} of tables) {
      const table = db.table(tableName);
      const {primKey} = table.schema;
      const inbound = !!primKey.keyPath;
      const LIMIT = options!.numRowsPerChunk || DEFAULT_ROWS_PER_CHUNK;
      const emptyTableExport: DexieExportedTable = inbound ? {
        tableName: table.name,
        inbound: true,
        rows: []
      } : {
        tableName: table.name,
        inbound: false,
        rows: []
      };
      let emptyTableExportJson = JSON.stringify(emptyTableExport, undefined, prettyJson ? 2 : undefined);
      if (prettyJson) {
        // Increase indentation according to this:
        // {
        //   ...
        //   data: [
        //     ...
        //     data: [
        // 123456<---- here
        //     ] 
        //   ]
        // }
        emptyTableExportJson = emptyTableExportJson.split('\n').join('\n    ');
      }
      const posEndRowsArray = emptyTableExportJson.lastIndexOf(']');
      slices.push(emptyTableExportJson.substring(0, posEndRowsArray));
      let lastKey: any = null;
      let mayHaveMoreRows = true;
      while (mayHaveMoreRows) {
        if (progressCallback) {
          // Run the callback outside the ongoing transaction so that any database
          // access it performs does not join the export transaction.
          Dexie.ignoreTransaction(() => progressCallback(progress));
        }
        // Page through the table in primary-key order, LIMIT rows at a time.
        const chunkedCollection = lastKey == null ?
          table.limit(LIMIT) :
          table.where(':id').above(lastKey).limit(LIMIT);

        const values = await chunkedCollection.toArray();

        if (values.length === 0) break;

        if (lastKey != null) {
          // Not initial chunk. Must add a comma:
          slices.push(",");
          if (prettyJson) {
            slices.push("\n      ");
          }
        }

        mayHaveMoreRows = values.length === LIMIT;
        
        if (inbound) {
          const filteredValues = filter ?
            values.filter(value => filter(tableName, value)) :
            values;

          const tsonValues = filteredValues.map(value => TSON.encapsulate(value));
          if (TSON.mustFinalize()) {
            await Dexie.waitFor(TSON.finalize(tsonValues));
          }

          let json = JSON.stringify(tsonValues, undefined, prettyJson ? 2 : undefined);
          if (prettyJson) json = json.split('\n').join('\n      ');

          // Strip the surrounding "[" and "]" so the chunk fits into the already
          // opened rows array. By generating a Blob here, we give the web platform
          // the opportunity to store the contents on disk and release RAM.
          slices.push(new Blob([json.substring(1, json.length - 1)]));
          lastKey = values.length > 0 ?
            Dexie.getByKeyPath(values[values.length - 1], primKey.keyPath as string) :
            null;
        } else {
          const keys = await chunkedCollection.primaryKeys();
          let keyvals = keys.map((key, i) => [key, values[i]]);
          if (filter) keyvals = keyvals.filter(([key, value]) => filter(tableName, value, key));

          const tsonTuples = keyvals.map(tuple => TSON.encapsulate(tuple));
          if (TSON.mustFinalize()) {
            await Dexie.waitFor(TSON.finalize(tsonTuples));
          }

          let json = JSON.stringify(tsonTuples, undefined, prettyJson ? 2 : undefined);
          if (prettyJson) json = json.split('\n').join('\n      ');

          // Strip the surrounding "[" and "]" so the chunk fits into the already
          // opened rows array. By generating a Blob here, we give the web platform
          // the opportunity to store the contents on disk and release RAM.
          slices.push(new Blob([json.substring(1, json.length - 1)]));
          lastKey = keys.length > 0 ?
            keys[keys.length - 1] :
            null;
        }
        progress.completedRows += values.length;
      }
      slices.push(emptyTableExportJson.substring(posEndRowsArray)); // "]}"
      progress.completedTables += 1;
      if (progress.completedTables < progress.totalTables) {
        slices.push(",");
      }
    }
    slices.push(emptyExportJson.substring(posEndDataArray));
    progress.done = true;
    if (progressCallback) {
      // Run the callback outside the ongoing transaction so that any database
      // access it performs does not join the export transaction.
      Dexie.ignoreTransaction(() => progressCallback(progress));
    }
  }
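
Once exportAll resolves, the accumulated slices (a mix of JSON strings and Blobs) can be assembled into the final export file. A minimal sketch, assuming the surrounding routine exposes the slices array (the MIME type is illustrative):

await exportAll();
// The Blob constructor accepts strings and Blobs alike, so the collected
// slices become the export file without pulling the data back into memory.
const exportBlob = new Blob(slices, { type: "application/json" });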