Example #1
export async function importInto(db: Dexie, exportedData: Blob | JsonStream<DexieExportJsonStructure>, options?: ImportOptions): Promise<void> {
  options = options || {}; // All boolean options default to false.
  const CHUNK_SIZE = options!.chunkSizeBytes || (DEFAULT_KILOBYTES_PER_CHUNK * 1024);
  const jsonStream = await loadUntilWeGotEnoughData(exportedData, CHUNK_SIZE);
  let dbExportFile = jsonStream.result;
  const readBlobsSynchronously = 'FileReaderSync' in self; // true in workers only.

  const dbExport = dbExportFile.data!;

  if (!options!.acceptNameDiff && db.name !== dbExport.databaseName)
    throw new Error(`Name differs. Current database name is ${db.name} but export is ${dbExport.databaseName}`);
  if (!options!.acceptVersionDiff && db.verno !== dbExport.databaseVersion) {
    // Possible feature: Call upgraders in some isolated way if this happens... ?
    throw new Error(`Database version differs. Current database is in version ${db.verno} but export is ${dbExport.databaseVersion}`);
  }
  
  const { progressCallback } = options;
  const progress: ImportProgress = {
    done: false,
    completedRows: 0,
    completedTables: 0,
    totalRows: dbExport.tables.reduce((p, c) => p + c.rowCount, 0),
    totalTables: dbExport.tables.length
  };
  if (progressCallback) {
    // Keep ongoing transaction private
    Dexie.ignoreTransaction(()=>progressCallback(progress));
  }

  if (options.noTransaction) {
    await importAll();
  } else {
    await db.transaction('rw', db.tables, importAll);
  }  

  async function importAll () {
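    // The export arrives as a stream of chunks: each pass imports whatever rows
    // are currently buffered per table, then pulls more data from jsonStream
    // until it reports done() or eof().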
    do {
      for (const tableExport of dbExport.data) {
        if (!tableExport.rows) break; // Need to pull more!
        if ((tableExport.rows as any).complete && tableExport.rows.length === 0)
          continue;

        if (progressCallback) {
          // Keep ongoing transaction private
          Dexie.ignoreTransaction(()=>progressCallback(progress));
        }
        const tableName = tableExport.tableName;
        const table = db.table(tableName);
        const tableSchemaStr = dbExport.tables.filter(t => t.name === tableName)[0].schema;
        if (!table) {
          if (!options!.acceptMissingTables)
            throw new Error(`Exported table ${tableExport.tableName} is missing in installed database`);
          else
            continue;
        }
        if (!options!.acceptChangedPrimaryKey &&
          tableSchemaStr.split(',')[0] != table.schema.primKey.src) {
          throw new Error(`Primary key differs for table ${tableExport.tableName}.`);
        }
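        // Rows were encapsulated with TSON on export (see the export example
        // below); revive them before writing to IndexedDB.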
        const rows = tableExport.rows.map(row => TSON.revive(row));
        const filter = options!.filter;
        const filteredRows = filter ?
          tableExport.inbound ?
            rows.filter(value => filter(tableName, value)) :
            rows.filter(([key, value]) => filter(tableName, value, key)) :
          rows;
        const [keys, values] = tableExport.inbound ?
          [undefined, filteredRows] :
          // Outbound rows are [key, value] tuples; take keys and values from the
          // same filtered set so they stay aligned.
          [filteredRows.map(row => row[0]), filteredRows.map(row => row[1])];

        if (options!.clearTablesBeforeImport) {
          await table.clear();
        }
        if (options!.overwriteValues)
          await table.bulkPut(values, keys);
        else
          await table.bulkAdd(values, keys);
          
        progress.completedRows += rows.length;
        if ((rows as any).complete) {
          progress.completedTables += 1;
        }
        rows.splice(0, rows.length); // Free up RAM, keep existing array instance.
      }

      // Avoid unnecessary loops in "for (const tableExport of dbExport.data)"
      while (dbExport.data.length > 0 && dbExport.data[0].rows && (dbExport.data[0].rows as any).complete) {
        // We've already imported all rows from the first table. Delete its occurrence
        dbExport.data.splice(0, 1); 
      }
      if (!jsonStream.done() && !jsonStream.eof()) {
        // Pull some more (keeping transaction alive)
        if (readBlobsSynchronously) {
          // If we can pull from the blob synchronously, we don't have to keep
          // the transaction alive using Dexie.waitFor().
          // This is only possible in workers.
          jsonStream.pullSync(CHUNK_SIZE);
        } else {
          await Dexie.waitFor(jsonStream.pullAsync(CHUNK_SIZE));
        }
      }
    } while (!jsonStream.done() && !jsonStream.eof());
  }
  progress.done = true;
  if (progressCallback) {
    // Keep ongoing transaction private
    Dexie.ignoreTransaction(()=>progressCallback(progress));
  }
}
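A minimal usage sketch for importInto above, assuming a Dexie database whose schema matches the export and a Blob obtained elsewhere; the option names are the ones read by the function, everything else (database name, schema, logging) is illustrative.

import Dexie from 'dexie';

// Hypothetical database; the schema is assumed to match the export being restored.
const db = new Dexie('mydb');
db.version(1).stores({ friends: 'id, name' });

// importInto is the function from Example #1.
async function restoreFromBlob(exportBlob: Blob): Promise<void> {
  await importInto(db, exportBlob, {
    overwriteValues: true,   // use bulkPut so existing rows are replaced
    clearTablesBeforeImport: false,
    progressCallback: (p: { completedRows: number; totalRows: number }) => {
      console.log(`Imported ${p.completedRows}/${p.totalRows} rows`);
      return true;           // the return value is not used by the code above
    }
  });
}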
Example #2
  async function exportAll() {
    // Count rows:
    const tablesRowCounts = await Promise.all(db.tables.map(table => table.count()));
    tablesRowCounts.forEach((rowCount, i) => tables[i].rowCount = rowCount);
    progress.totalRows = tablesRowCounts.reduce((p, c) => p + c, 0);

    // Write first JSON slice
    const emptyExportJson = JSON.stringify(emptyExport, undefined, prettyJson ? 2 : undefined);
    const posEndDataArray = emptyExportJson.lastIndexOf(']');
    const firstJsonSlice = emptyExportJson.substring(0, posEndDataArray);
    slices.push(firstJsonSlice);
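    // The output is assembled from slices: everything before the closing ']' of
    // the (still empty) data array is pushed here, each table contributes its
    // own slices in the loop below, and the remainder is appended at the end.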

    const filter = options!.filter;

    for (const {name: tableName} of tables) {
      const table = db.table(tableName);
      const {primKey} = table.schema;
      const inbound = !!primKey.keyPath;
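      // Tables with an inline primary key (a keyPath) are "inbound": their rows
      // can be exported as plain values. Outbound tables need [key, value] pairs.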
      const LIMIT = options!.numRowsPerChunk || DEFAULT_ROWS_PER_CHUNK;
      const emptyTableExport: DexieExportedTable = inbound ? {
        tableName: table.name,
        inbound: true,
        rows: []
      } : {
        tableName: table.name,
        inbound: false,
        rows: []
      };
      let emptyTableExportJson = JSON.stringify(emptyTableExport, undefined, prettyJson ? 2 : undefined);
      if (prettyJson) {
        // Increase indentation according to this:
        // {
        //   ...
        //   data: [
        //     ...
        //     data: [
        // 123456<---- here
        //     ] 
        //   ]
        // }
        emptyTableExportJson = emptyTableExportJson.split('\n').join('\n    ');
      }
      const posEndRowsArray = emptyTableExportJson.lastIndexOf(']');
      slices.push(emptyTableExportJson.substring(0, posEndRowsArray));
      let lastKey: any = null;
      let mayHaveMoreRows = true;
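      // Page through the table LIMIT rows at a time, using the last seen primary
      // key as a cursor (where(':id').above(lastKey)) so each chunk resumes
      // where the previous one stopped.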
      while (mayHaveMoreRows) {
        if (progressCallback) {
          // Keep ongoing transaction private
          Dexie.ignoreTransaction(()=>progressCallback(progress));
        }
        const chunkedCollection = lastKey == null ?
          table.limit(LIMIT) :
          table.where(':id').above(lastKey).limit(LIMIT);

        const values = await chunkedCollection.toArray();

        if (values.length === 0) break;

        if (lastKey != null) {
          // Not initial chunk. Must add a comma:
          slices.push(",");
          if (prettyJson) {
            slices.push("\n      ");
          }
        }

        mayHaveMoreRows = values.length === LIMIT;
        
        if (inbound) {
          const filteredValues = filter ?
            values.filter(value => filter(tableName, value)) :
            values;

          const tsonValues = filteredValues.map(value => TSON.encapsulate(value));
          if (TSON.mustFinalize()) {
            await Dexie.waitFor(TSON.finalize(tsonValues));
          }

          let json = JSON.stringify(tsonValues, undefined, prettyJson ? 2 : undefined);
          if (prettyJson) json = json.split('\n').join('\n      ');

          // By generating a blob here, we give the web platform the opportunity
          // to store the contents on disk and release RAM.
          slices.push(new Blob([json.substring(1, json.length - 1)]));
          lastKey = values.length > 0 ?
            Dexie.getByKeyPath(values[values.length -1], primKey.keyPath as string) :
            null;
        } else {
          const keys = await chunkedCollection.primaryKeys();
          let keyvals = keys.map((key, i) => [key, values[i]]);
          if (filter) keyvals = keyvals.filter(([key, value]) => filter(tableName, value, key));

          const tsonTuples = keyvals.map(tuple => TSON.encapsulate(tuple));
          if (TSON.mustFinalize()) {
            await Dexie.waitFor(TSON.finalize(tsonTuples));
          }

          let json = JSON.stringify(tsonTuples, undefined, prettyJson ? 2 : undefined);
          if (prettyJson) json = json.split('\n').join('\n      ');

          // By generating a blob here, we give the web platform the opportunity
          // to store the contents on disk and release RAM.
          slices.push(new Blob([json.substring(1, json.length - 1)]));
          lastKey = keys.length > 0 ?
            keys[keys.length - 1] :
            null;
        }
        progress.completedRows += values.length;
      }
      slices.push(emptyTableExportJson.substr(posEndRowsArray)); // "]}"
      progress.completedTables += 1;
      if (progress.completedTables < progress.totalTables) {
        slices.push(",");
      }
    }
    slices.push(emptyExportJson.substr(posEndDataArray));
    progress.done = true;
    if (progressCallback) {
      // Keep ongoing transaction private
      Dexie.ignoreTransaction(()=>progressCallback(progress));
    }
  }
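The progress object mutated above has the same fields as the import example's ImportProgress. Below is a sketch of a callback that could consume it; the interface name and the log formatting are assumptions, only the field names come from the code above.

// Sketch of a progressCallback consumer; field names match the progress object above.
interface ExportImportProgress {
  done: boolean;
  completedRows: number;
  completedTables: number;
  totalRows: number;
  totalTables: number;
}

function logProgress(p: ExportImportProgress): boolean {
  const pct = p.totalRows > 0 ? Math.round((100 * p.completedRows) / p.totalRows) : 0;
  console.log(
    `${p.done ? 'Finished' : 'Exporting'}: table ${p.completedTables}/${p.totalTables}, ` +
    `${p.completedRows}/${p.totalRows} rows (${pct}%)`
  );
  return true; // the return value is not inspected by the code above
}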