.forEach(index => {
   const from = this.courseOffsets[index];
   const to = this.courseOffsets[index + 1];
   for (let i = from; i < to; i++) {
     result.add(i);
   }
 });
 this.terms.forEach((term, j) => {
   const add = terms.includes(term);
   if (add) {
     for (let k = 0; k < this.cardinalityTable[i][j]; k++) {
       result.add(p++);
     }
   } else {
     p += this.cardinalityTable[i][j];
   }
 });
 private unpackTermIndex(data: string): TypedFastBitSet {
   const terms = new Set<number>(unpack(data));
   const result = new TypedFastBitSet();
   let index = 0;
   for (let i = 0; i < this.courses.length; i++) {
     for (let j = 0; j < this.terms.length; j++) {
       const included = terms.has(j);
       for (let k = 0; k < this.cardinalityTable[i][j]; k++, index++) {
         if (included) {
           result.add(index);
         }
       }
     }
   }
   return result;
 }
/**
 * Computes the filtered course/term/section result set for the given filter
 * state.
 *
 * Each active filter contributes a "subsetter": an observable emitting one
 * function that narrows a result bitset (intersection with an allow-mask or
 * difference with a deny-mask). All subsetters are combined, folded over the
 * full index universe, and the surviving indices are serialized.
 *
 * @param institution Institution metadata (period names are read from it).
 * @param algolia     Full-text search index used for the free-form query.
 * @param indexes     Precomputed bitset indexes over courses/terms/sections.
 * @param transcript  Courses the student has already taken.
 * @param filter      Active filter state.
 * @returns Observable emitting one serialized course → term → sections map.
 */
function search(
  institution: InstitutionData,
  algolia: algoliasearch.Index,
  indexes: Indexes,
  transcript: { course: string }[],
  filter: FilterState
): Observable<SerializedMap> {
  // OR together the bitsets of the selected values, then AND that union
  // against the running result set.
  function filterAny<T>(values: T[], getter: (value: T) => TypedFastBitSet) {
    return of(...values).pipe(
      map(x => getter(x)),
      reduce((a, b) => a.union(b), new TypedFastBitSet()),
      map(mask => (results: TypedFastBitSet) => results.intersection(mask))
    );
  }

  // Each subsetter emits exactly one narrowing function, then completes.
  const subsetters = [];
  if (filter.query && filter.query.length > 0) {
    subsetters.push(
      defer(() =>
        algolia.search({
          query: filter.query,
          attributesToRetrieve: [],
          attributesToHighlight: [],
          allowTyposOnNumericTokens: false,
          hitsPerPage: 1000
        })
      ).pipe(
        map(results => results.hits.map(result => result.objectID)),
        // Map the list of course identifiers to the corresponding bitset.
        map(courses => indexes.getBitSetForCourses(courses)),
        // Also admit courses whose identifier literally contains the query,
        // so exact course-code matches survive regardless of search ranking.
        map(mask =>
          mask.union(
            indexes.getBitSetForCourses(
              indexes
                .getCourses()
                .filter(x => x.includes(filter.query.toUpperCase()))
            )
          )
        ),
        map(mask => results => results.intersection(mask)),
        first() // Complete the observable so combineLatest can finish.
      )
    );
  }

  // Unless requested, exclude sections that are already full.
  if (!filter.full) {
    subsetters.push(
      defer(() => of(indexes.enrollment("full"))).pipe(
        map(mask => results => results.difference(mask))
      )
    );
  }

  // Unless requested, exclude courses already on the transcript.
  if (!filter.taken) {
    subsetters.push(
      of(transcript.map(record => record.course)).pipe(
        map(courses => indexes.getBitSetForCourses(courses)),
        map(mask => results => results.difference(mask))
      )
    );
  }

  // Unless requested, exclude all but the 16 most recent terms.
  if (!filter.old) {
    subsetters.push(
      defer(() => of(indexes.getTerms())).pipe(
        // Clamp to 0: with fewer than 16 terms, a negative slice end would
        // wrap around and wrongly mask out *recent* terms as "old".
        map(terms => terms.slice(0, Math.max(0, terms.length - 16))),
        map(terms => indexes.getBitSetForTerms(terms)),
        map(mask => results => results.difference(mask))
      )
    );
  }

  if (filter.departments.length) {
    subsetters.push(filterAny(filter.departments, x => indexes.department(x)));
  }
  if (filter.instructors.length) {
    subsetters.push(filterAny(filter.instructors, x => indexes.instructor(x)));
  }
  // NOTE(review): unlike departments/instructors, this push is not guarded by
  // a length check — an empty period selection yields an empty mask and thus
  // zero results. Confirm whether that is the intended UX.
  subsetters.push(
    filterAny(filter.periods.filter(x => x < institution.periods.length), x =>
      indexes.period(institution.periods[x].name)
    )
  );

  // This is a rather expensive filter...
  if (filter.days) {
    // NOTE(review): this interval tree is built but never consulted, so the
    // day filter currently has no effect on the results.
    // TODO: wire the tree into a subsetter that checks meeting times.
    const dayIntervals = filter.days
      .map(day => [1440 * day, 1440 * (day + 1)])
      .reduce((tree, interval) => tree.add(interval), new IntervalTree());
    void dayIntervals; // keep lint quiet until the filter is implemented
  }

  // Seed the fold with the full universe: every section index set.
  const universeSize = indexes.getTotalCardinality();
  const state = new TypedFastBitSet();
  state.resize(universeSize);
  for (let i = 0; i < universeSize; i++) {
    state.add(i);
  }

  // combineLatest waits for every subsetter to emit its function; the
  // emitted array is flattened by mergeAll and each narrowing function is
  // applied to the full state in turn.
  return combineLatest(...subsetters).pipe(
    mergeAll(),
    reduce((state, f) => f(state), state),
    map(data => indexes.getCourseTermSections(data)),
    // Convert the nested Map structure into plain serializable arrays.
    // (Parameter renamed from `map` to avoid shadowing the RxJS operator.)
    map(sections => {
      return Array.from(sections.entries()).map(([k, v]) => {
        return [k, Array.from(v.entries())];
      }) as SerializedMap;
    })
  );
}