{"version":3,"file":"index.memory.esm.js","sources":["../src/core/version.ts","../src/auth/user.ts","../src/util/error.ts","../src/api/credentials.ts","../src/api/timestamp.ts","../src/core/snapshot_version.ts","../src/model/path.ts","../src/model/document_key.ts","../src/util/obj.ts","../src/util/byte_string.ts","../src/util/types.ts","../src/model/server_timestamps.ts","../src/model/values.ts","../src/model/transform_operation.ts","../src/model/mutation.ts","../src/model/object_value.ts","../src/remote/rpc_error.ts","../src/model/document.ts","../src/core/target.ts","../src/core/query.ts","../src/local/target_data.ts","../src/remote/existence_filter.ts","../src/util/sorted_map.ts","../src/util/sorted_set.ts","../src/model/collections.ts","../src/model/document_set.ts","../src/core/view_snapshot.ts","../src/remote/remote_event.ts","../src/remote/watch_change.ts","../src/remote/serializer.ts","../src/platform/platform.ts","../src/util/log.ts","../src/util/assert.ts","../src/util/misc.ts","../src/core/database_info.ts","../src/util/obj_map.ts","../src/model/mutation_batch.ts","../src/local/persistence_promise.ts","../src/local/local_documents_view.ts","../src/local/local_view_changes.ts","../src/core/listen_sequence.ts","../src/util/promise.ts","../src/remote/backoff.ts","../src/local/memory_index_manager.ts","../src/core/target_id_generator.ts","../src/local/simple_db.ts","../src/util/async_queue.ts","../src/local/lru_garbage_collector.ts","../src/local/local_store.ts","../src/local/persistence.ts","../src/local/reference_set.ts","../src/util/input_validation.ts","../src/api/blob.ts","../src/api/field_path.ts","../src/api/field_value.ts","../src/api/geo_point.ts","../src/api/user_data_reader.ts","../src/remote/persistent_stream.ts","../src/remote/datastore.ts","../src/core/transaction.ts","../src/remote/online_state_tracker.ts","../src/remote/remote_store.ts","../src/local/shared_client_state.ts","../src/core/view.ts","../src/core/transaction_runner.ts","../src/core/sync_engine.ts","../src/core/event_manager.ts","../src/local/index_free_query_engine.ts","../src/local/memory_mutation_queue.ts","../src/local/memory_remote_document_cache.ts","../src/local/remote_document_change_buffer.ts","../src/local/memory_target_cache.ts","../src/local/memory_persistence.ts","../src/core/component_provider.ts","../src/core/firestore_client.ts","../src/util/async_observer.ts","../src/api/observer.ts","../src/api/user_data_writer.ts","../src/api/database.ts","../src/util/api.ts","../src/platform/config.ts","../src/remote/connectivity_monitor_noop.ts","../src/platform_browser/browser_connectivity_monitor.ts","../src/remote/stream_bridge.ts","../src/platform_browser/webchannel_connection.ts","../index.memory.ts","../src/platform_browser/browser_init.ts","../src/platform_browser/browser_platform.ts"],"sourcesContent":["/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport firebase from '@firebase/app';\n\n/** The semver (www.semver.org) 
version of the SDK. */\nexport const SDK_VERSION = firebase.SDK_VERSION;\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Simple wrapper around a nullable UID. Mostly exists to make code more\n * readable.\n */\nexport class User {\n /** A user with a null UID. */\n static readonly UNAUTHENTICATED = new User(null);\n\n // TODO(mikelehen): Look into getting a proper uid-equivalent for\n // non-FirebaseAuth providers.\n static readonly GOOGLE_CREDENTIALS = new User('google-credentials-uid');\n static readonly FIRST_PARTY = new User('first-party-uid');\n\n constructor(readonly uid: string | null) {}\n\n isAuthenticated(): boolean {\n return this.uid != null;\n }\n\n /**\n * Returns a key representing this user, suitable for inclusion in a\n * dictionary.\n */\n toKey(): string {\n if (this.isAuthenticated()) {\n return 'uid:' + this.uid;\n } else {\n return 'anonymous-user';\n }\n }\n\n isEqual(otherUser: User): boolean {\n return otherUser.uid === this.uid;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as firestore from '@firebase/firestore-types';\n\n/**\n * Error Codes describing the different ways Firestore can fail. These come\n * directly from GRPC.\n */\nexport type Code = firestore.FirestoreErrorCode;\n\nexport const Code = {\n // Causes are copied from:\n // https://github.com/grpc/grpc/blob/bceec94ea4fc5f0085d81235d8e1c06798dc341a/include/grpc%2B%2B/impl/codegen/status_code_enum.h\n /** Not an error; returned on success. */\n OK: 'ok' as Code,\n\n /** The operation was cancelled (typically by the caller). */\n CANCELLED: 'cancelled' as Code,\n\n /** Unknown error or an error from a different error domain. */\n UNKNOWN: 'unknown' as Code,\n\n /**\n * Client specified an invalid argument. Note that this differs from\n * FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments that are\n * problematic regardless of the state of the system (e.g., a malformed file\n * name).\n */\n INVALID_ARGUMENT: 'invalid-argument' as Code,\n\n /**\n * Deadline expired before operation could complete. For operations that\n * change the state of the system, this error may be returned even if the\n * operation has completed successfully. 
For example, a successful response\n * from a server could have been delayed long enough for the deadline to\n * expire.\n */\n DEADLINE_EXCEEDED: 'deadline-exceeded' as Code,\n\n /** Some requested entity (e.g., file or directory) was not found. */\n NOT_FOUND: 'not-found' as Code,\n\n /**\n * Some entity that we attempted to create (e.g., file or directory) already\n * exists.\n */\n ALREADY_EXISTS: 'already-exists' as Code,\n\n /**\n * The caller does not have permission to execute the specified operation.\n * PERMISSION_DENIED must not be used for rejections caused by exhausting\n * some resource (use RESOURCE_EXHAUSTED instead for those errors).\n * PERMISSION_DENIED must not be used if the caller can not be identified\n * (use UNAUTHENTICATED instead for those errors).\n */\n PERMISSION_DENIED: 'permission-denied' as Code,\n\n /**\n * The request does not have valid authentication credentials for the\n * operation.\n */\n UNAUTHENTICATED: 'unauthenticated' as Code,\n\n /**\n * Some resource has been exhausted, perhaps a per-user quota, or perhaps the\n * entire file system is out of space.\n */\n RESOURCE_EXHAUSTED: 'resource-exhausted' as Code,\n\n /**\n * Operation was rejected because the system is not in a state required for\n * the operation's execution. For example, directory to be deleted may be\n * non-empty, an rmdir operation is applied to a non-directory, etc.\n *\n * A litmus test that may help a service implementor in deciding\n * between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:\n * (a) Use UNAVAILABLE if the client can retry just the failing call.\n * (b) Use ABORTED if the client should retry at a higher-level\n * (e.g., restarting a read-modify-write sequence).\n * (c) Use FAILED_PRECONDITION if the client should not retry until\n * the system state has been explicitly fixed. E.g., if an \"rmdir\"\n * fails because the directory is non-empty, FAILED_PRECONDITION\n * should be returned since the client should not retry unless\n * they have first fixed up the directory by deleting files from it.\n * (d) Use FAILED_PRECONDITION if the client performs conditional\n * REST Get/Update/Delete on a resource and the resource on the\n * server does not match the condition. E.g., conflicting\n * read-modify-write on the same resource.\n */\n FAILED_PRECONDITION: 'failed-precondition' as Code,\n\n /**\n * The operation was aborted, typically due to a concurrency issue like\n * sequencer check failures, transaction aborts, etc.\n *\n * See litmus test above for deciding between FAILED_PRECONDITION, ABORTED,\n * and UNAVAILABLE.\n */\n ABORTED: 'aborted' as Code,\n\n /**\n * Operation was attempted past the valid range. E.g., seeking or reading\n * past end of file.\n *\n * Unlike INVALID_ARGUMENT, this error indicates a problem that may be fixed\n * if the system state changes. For example, a 32-bit file system will\n * generate INVALID_ARGUMENT if asked to read at an offset that is not in the\n * range [0,2^32-1], but it will generate OUT_OF_RANGE if asked to read from\n * an offset past the current file size.\n *\n * There is a fair bit of overlap between FAILED_PRECONDITION and\n * OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific error)\n * when it applies so that callers who are iterating through a space can\n * easily look for an OUT_OF_RANGE error to detect when they are done.\n */\n OUT_OF_RANGE: 'out-of-range' as Code,\n\n /** Operation is not implemented or not supported/enabled in this service. 
*/\n UNIMPLEMENTED: 'unimplemented' as Code,\n\n /**\n * Internal errors. Means some invariants expected by underlying System has\n * been broken. If you see one of these errors, Something is very broken.\n */\n INTERNAL: 'internal' as Code,\n\n /**\n * The service is currently unavailable. This is a most likely a transient\n * condition and may be corrected by retrying with a backoff.\n *\n * See litmus test above for deciding between FAILED_PRECONDITION, ABORTED,\n * and UNAVAILABLE.\n */\n UNAVAILABLE: 'unavailable' as Code,\n\n /** Unrecoverable data loss or corruption. */\n DATA_LOSS: 'data-loss' as Code\n};\n\n/**\n * An error class used for Firestore-generated errors. Ideally we should be\n * using FirebaseError, but integrating with it is overly arduous at the moment,\n * so we define our own compatible error class (with a `name` of 'FirebaseError'\n * and compatible `code` and `message` fields.)\n */\nexport class FirestoreError extends Error implements firestore.FirestoreError {\n name = 'FirebaseError';\n stack?: string;\n\n constructor(readonly code: Code, readonly message: string) {\n super(message);\n\n // HACK: We write a toString property directly because Error is not a real\n // class and so inheritance does not work correctly. We could alternatively\n // do the same \"back-door inheritance\" trick that FirebaseError does.\n this.toString = () => `${this.name}: [code=${this.code}]: ${this.message}`;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { User } from '../auth/user';\nimport { hardAssert, debugAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport {\n FirebaseAuthInternal,\n FirebaseAuthInternalName\n} from '@firebase/auth-interop-types';\nimport { Provider } from '@firebase/component';\n\n// TODO(mikelehen): This should be split into multiple files and probably\n// moved to an auth/ folder to match other platforms.\n\nexport interface FirstPartyCredentialsSettings {\n type: 'gapi';\n client: unknown;\n sessionIndex: string;\n}\n\nexport interface ProviderCredentialsSettings {\n type: 'provider';\n client: CredentialsProvider;\n}\n\n/** Settings for private credentials */\nexport type CredentialsSettings =\n | FirstPartyCredentialsSettings\n | ProviderCredentialsSettings;\n\nexport type TokenType = 'OAuth' | 'FirstParty';\nexport interface Token {\n /** Type of token. 
*/\n type: TokenType;\n\n /**\n * The user with which the token is associated (used for persisting user\n * state on disk, etc.).\n */\n user: User;\n\n /** Extra header values to be passed along with a request */\n authHeaders: { [header: string]: string };\n}\n\nexport class OAuthToken implements Token {\n type = 'OAuth' as TokenType;\n authHeaders: { [header: string]: string };\n constructor(value: string, public user: User) {\n this.authHeaders = {};\n // Set the headers using Object Literal notation to avoid minification\n this.authHeaders['Authorization'] = `Bearer ${value}`;\n }\n}\n\n/**\n * A Listener for credential change events. The listener should fetch a new\n * token and may need to invalidate other state if the current user has also\n * changed.\n */\nexport type CredentialChangeListener = (user: User) => void;\n\n/**\n * Provides methods for getting the uid and token for the current user and\n * listening for changes.\n */\nexport interface CredentialsProvider {\n /** Requests a token for the current user. */\n getToken(): Promise<Token | null>;\n\n /**\n * Marks the last retrieved token as invalid, making the next GetToken request\n * force-refresh the token.\n */\n invalidateToken(): void;\n\n /**\n * Specifies a listener to be notified of credential changes\n * (sign-in / sign-out, token changes). It is immediately called once with the\n * initial user.\n */\n setChangeListener(changeListener: CredentialChangeListener): void;\n\n /** Removes the previously-set change listener. */\n removeChangeListener(): void;\n}\n\n/** A CredentialsProvider that always yields an empty token. */\nexport class EmptyCredentialsProvider implements CredentialsProvider {\n /**\n * Stores the listener registered with setChangeListener()\n * This isn't actually necessary since the UID never changes, but we use this\n * to verify the listen contract is adhered to in tests.\n */\n private changeListener: CredentialChangeListener | null = null;\n\n getToken(): Promise<Token | null> {\n return Promise.resolve<Token | null>(null);\n }\n\n invalidateToken(): void {}\n\n setChangeListener(changeListener: CredentialChangeListener): void {\n debugAssert(\n !this.changeListener,\n 'Can only call setChangeListener() once.'\n );\n this.changeListener = changeListener;\n // Fire with initial user.\n changeListener(User.UNAUTHENTICATED);\n }\n\n removeChangeListener(): void {\n debugAssert(\n this.changeListener !== null,\n 'removeChangeListener() when no listener registered'\n );\n this.changeListener = null;\n }\n}\n\nexport class FirebaseCredentialsProvider implements CredentialsProvider {\n /**\n * The auth token listener registered with FirebaseApp, retained here so we\n * can unregister it.\n */\n private tokenListener: ((token: string | null) => void) | null = null;\n\n /** Tracks the current User. */\n private currentUser: User = User.UNAUTHENTICATED;\n private receivedInitialUser: boolean = false;\n\n /**\n * Counter used to detect if the token changed while a getToken request was\n * outstanding.\n */\n private tokenCounter = 0;\n\n /** The listener registered with setChangeListener(). 
*/\n private changeListener: CredentialChangeListener | null = null;\n\n private forceRefresh = false;\n\n private auth: FirebaseAuthInternal | null;\n\n constructor(authProvider: Provider<FirebaseAuthInternalName>) {\n this.tokenListener = () => {\n this.tokenCounter++;\n this.currentUser = this.getUser();\n this.receivedInitialUser = true;\n if (this.changeListener) {\n this.changeListener(this.currentUser);\n }\n };\n\n this.tokenCounter = 0;\n\n this.auth = authProvider.getImmediate({ optional: true });\n\n if (this.auth) {\n this.auth.addAuthTokenListener(this.tokenListener!);\n } else {\n // if auth is not available, invoke tokenListener once with null token\n this.tokenListener(null);\n authProvider.get().then(\n auth => {\n this.auth = auth;\n if (this.tokenListener) {\n // tokenListener can be removed by removeChangeListener()\n this.auth.addAuthTokenListener(this.tokenListener);\n }\n },\n () => {\n /* this.authProvider.get() never rejects */\n }\n );\n }\n }\n\n getToken(): Promise<Token | null> {\n debugAssert(\n this.tokenListener != null,\n 'getToken cannot be called after listener removed.'\n );\n\n // Take note of the current value of the tokenCounter so that this method\n // can fail (with an ABORTED error) if there is a token change while the\n // request is outstanding.\n const initialTokenCounter = this.tokenCounter;\n const forceRefresh = this.forceRefresh;\n this.forceRefresh = false;\n\n if (!this.auth) {\n return Promise.resolve(null);\n }\n\n return this.auth.getToken(forceRefresh).then(tokenData => {\n // Cancel the request since the token changed while the request was\n // outstanding so the response is potentially for a previous user (which\n // user, we can't be sure).\n if (this.tokenCounter !== initialTokenCounter) {\n throw new FirestoreError(\n Code.ABORTED,\n 'getToken aborted due to token change.'\n );\n } else {\n if (tokenData) {\n hardAssert(\n typeof tokenData.accessToken === 'string',\n 'Invalid tokenData returned from getToken():' + tokenData\n );\n return new OAuthToken(tokenData.accessToken, this.currentUser);\n } else {\n return null;\n }\n }\n });\n }\n\n invalidateToken(): void {\n this.forceRefresh = true;\n }\n\n setChangeListener(changeListener: CredentialChangeListener): void {\n debugAssert(\n !this.changeListener,\n 'Can only call setChangeListener() once.'\n );\n this.changeListener = changeListener;\n\n // Fire the initial event\n if (this.receivedInitialUser) {\n changeListener(this.currentUser);\n }\n }\n\n removeChangeListener(): void {\n debugAssert(\n this.tokenListener != null,\n 'removeChangeListener() called twice'\n );\n debugAssert(\n this.changeListener !== null,\n 'removeChangeListener() called when no listener registered'\n );\n\n if (this.auth) {\n this.auth.removeAuthTokenListener(this.tokenListener!);\n }\n this.tokenListener = null;\n this.changeListener = null;\n }\n\n // Auth.getUid() can return null even with a user logged in. 
It is because\n // getUid() is synchronous, but the auth code populating Uid is asynchronous.\n // This method should only be called in the AuthTokenListener callback\n // to guarantee to get the actual user.\n private getUser(): User {\n const currentUid = this.auth && this.auth.getUid();\n hardAssert(\n currentUid === null || typeof currentUid === 'string',\n 'Received invalid UID: ' + currentUid\n );\n return new User(currentUid);\n }\n}\n\n// Manual type definition for the subset of Gapi we use.\ninterface Gapi {\n auth: {\n getAuthHeaderValueForFirstParty: (\n userIdentifiers: Array<{ [key: string]: string }>\n ) => string | null;\n };\n}\n\n/*\n * FirstPartyToken provides a fresh token each time its value\n * is requested, because if the token is too old, requests will be rejected.\n * Technically this may no longer be necessary since the SDK should gracefully\n * recover from unauthenticated errors (see b/33147818 for context), but it's\n * safer to keep the implementation as-is.\n */\nexport class FirstPartyToken implements Token {\n type = 'FirstParty' as TokenType;\n user = User.FIRST_PARTY;\n\n constructor(private gapi: Gapi, private sessionIndex: string) {}\n\n get authHeaders(): { [header: string]: string } {\n const headers: { [header: string]: string } = {\n 'X-Goog-AuthUser': this.sessionIndex\n };\n const authHeader = this.gapi.auth.getAuthHeaderValueForFirstParty([]);\n if (authHeader) {\n headers['Authorization'] = authHeader;\n }\n return headers;\n }\n}\n\n/*\n * Provides user credentials required for the Firestore JavaScript SDK\n * to authenticate the user, using technique that is only available\n * to applications hosted by Google.\n */\nexport class FirstPartyCredentialsProvider implements CredentialsProvider {\n constructor(private gapi: Gapi, private sessionIndex: string) {}\n\n getToken(): Promise<Token | null> {\n return Promise.resolve(new FirstPartyToken(this.gapi, this.sessionIndex));\n }\n\n setChangeListener(changeListener: CredentialChangeListener): void {\n // Fire with initial uid.\n changeListener(User.FIRST_PARTY);\n }\n\n removeChangeListener(): void {}\n\n invalidateToken(): void {}\n}\n\n/**\n * Builds a CredentialsProvider depending on the type of\n * the credentials passed in.\n */\nexport function makeCredentialsProvider(\n credentials?: CredentialsSettings\n): CredentialsProvider {\n if (!credentials) {\n return new EmptyCredentialsProvider();\n }\n\n switch (credentials.type) {\n case 'gapi':\n const client = credentials.client as Gapi;\n // Make sure this really is a Gapi client.\n hardAssert(\n !!(\n typeof client === 'object' &&\n client !== null &&\n client['auth'] &&\n client['auth']['getAuthHeaderValueForFirstParty']\n ),\n 'unexpected gapi interface'\n );\n return new FirstPartyCredentialsProvider(\n client,\n credentials.sessionIndex || '0'\n );\n\n case 'provider':\n return credentials.client;\n\n default:\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'makeCredentialsProvider failed due to invalid credential type'\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Code, FirestoreError } from '../util/error';\nimport { primitiveComparator } from '../util/misc';\n\n// The earlist date supported by Firestore timestamps (0001-01-01T00:00:00Z).\nconst MIN_SECONDS = -62135596800;\n\nexport class Timestamp {\n static now(): Timestamp {\n return Timestamp.fromMillis(Date.now());\n }\n\n static fromDate(date: Date): Timestamp {\n return Timestamp.fromMillis(date.getTime());\n }\n\n static fromMillis(milliseconds: number): Timestamp {\n const seconds = Math.floor(milliseconds / 1000);\n const nanos = (milliseconds - seconds * 1000) * 1e6;\n return new Timestamp(seconds, nanos);\n }\n\n constructor(readonly seconds: number, readonly nanoseconds: number) {\n if (nanoseconds < 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Timestamp nanoseconds out of range: ' + nanoseconds\n );\n }\n if (nanoseconds >= 1e9) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Timestamp nanoseconds out of range: ' + nanoseconds\n );\n }\n if (seconds < MIN_SECONDS) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Timestamp seconds out of range: ' + seconds\n );\n }\n // This will break in the year 10,000.\n if (seconds >= 253402300800) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Timestamp seconds out of range: ' + seconds\n );\n }\n }\n\n toDate(): Date {\n return new Date(this.toMillis());\n }\n\n toMillis(): number {\n return this.seconds * 1000 + this.nanoseconds / 1e6;\n }\n\n _compareTo(other: Timestamp): number {\n if (this.seconds === other.seconds) {\n return primitiveComparator(this.nanoseconds, other.nanoseconds);\n }\n return primitiveComparator(this.seconds, other.seconds);\n }\n\n isEqual(other: Timestamp): boolean {\n return (\n other.seconds === this.seconds && other.nanoseconds === this.nanoseconds\n );\n }\n\n toString(): string {\n return (\n 'Timestamp(seconds=' +\n this.seconds +\n ', nanoseconds=' +\n this.nanoseconds +\n ')'\n );\n }\n\n valueOf(): string {\n // This method returns a string of the form <seconds>.<nanoseconds> where <seconds> is\n // translated to have a non-negative value and both <seconds> and <nanoseconds> are left-padded\n // with zeroes to be a consistent length. Strings with this format then have a lexiographical\n // ordering that matches the expected ordering. The <seconds> translation is done to avoid\n // having a leading negative sign (i.e. a leading '-' character) in its string representation,\n // which would affect its lexiographical ordering.\n const adjustedSeconds = this.seconds - MIN_SECONDS;\n // Note: Up to 12 decimal digits are required to represent all valid 'seconds' values.\n const formattedSeconds = String(adjustedSeconds).padStart(12, '0');\n const formattedNanoseconds = String(this.nanoseconds).padStart(9, '0');\n return formattedSeconds + '.' 
+ formattedNanoseconds;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Timestamp } from '../api/timestamp';\n\n/**\n * A version of a document in Firestore. This corresponds to the version\n * timestamp, such as update_time or read_time.\n */\nexport class SnapshotVersion {\n static fromTimestamp(value: Timestamp): SnapshotVersion {\n return new SnapshotVersion(value);\n }\n\n static min(): SnapshotVersion {\n return new SnapshotVersion(new Timestamp(0, 0));\n }\n\n private constructor(private timestamp: Timestamp) {}\n\n compareTo(other: SnapshotVersion): number {\n return this.timestamp._compareTo(other.timestamp);\n }\n\n isEqual(other: SnapshotVersion): boolean {\n return this.timestamp.isEqual(other.timestamp);\n }\n\n /** Returns a number representation of the version for use in spec tests. */\n toMicroseconds(): number {\n // Convert to microseconds.\n return this.timestamp.seconds * 1e6 + this.timestamp.nanoseconds / 1000;\n }\n\n toString(): string {\n return 'SnapshotVersion(' + this.timestamp.toString() + ')';\n }\n\n toTimestamp(): Timestamp {\n return this.timestamp;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert, fail } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\n\nexport const DOCUMENT_KEY_NAME = '__name__';\n\n/**\n * Path represents an ordered sequence of string segments.\n */\nabstract class BasePath<B extends BasePath<B>> {\n private segments: string[];\n private offset: number;\n private len: number;\n\n constructor(segments: string[], offset?: number, length?: number) {\n if (offset === undefined) {\n offset = 0;\n } else if (offset > segments.length) {\n fail('offset ' + offset + ' out of range ' + segments.length);\n }\n\n if (length === undefined) {\n length = segments.length - offset;\n } else if (length > segments.length - offset) {\n fail('length ' + length + ' out of range ' + (segments.length - offset));\n }\n this.segments = segments;\n this.offset = offset;\n this.len = length;\n }\n\n /**\n * Abstract constructor method to construct an instance of B with the given\n * parameters.\n */\n protected abstract construct(\n segments: string[],\n offset?: number,\n length?: number\n ): B;\n\n /**\n * Returns a String representation.\n *\n * Implementing classes are required to provide deterministic implementations as\n * the String representation 
is used to obtain canonical Query IDs.\n */\n abstract toString(): string;\n\n get length(): number {\n return this.len;\n }\n\n isEqual(other: B): boolean {\n return BasePath.comparator(this, other) === 0;\n }\n\n child(nameOrPath: string | B): B {\n const segments = this.segments.slice(this.offset, this.limit());\n if (nameOrPath instanceof BasePath) {\n nameOrPath.forEach(segment => {\n segments.push(segment);\n });\n } else {\n segments.push(nameOrPath);\n }\n return this.construct(segments);\n }\n\n /** The index of one past the last segment of the path. */\n private limit(): number {\n return this.offset + this.length;\n }\n\n popFirst(size?: number): B {\n size = size === undefined ? 1 : size;\n debugAssert(\n this.length >= size,\n \"Can't call popFirst() with less segments\"\n );\n return this.construct(\n this.segments,\n this.offset + size,\n this.length - size\n );\n }\n\n popLast(): B {\n debugAssert(!this.isEmpty(), \"Can't call popLast() on empty path\");\n return this.construct(this.segments, this.offset, this.length - 1);\n }\n\n firstSegment(): string {\n debugAssert(!this.isEmpty(), \"Can't call firstSegment() on empty path\");\n return this.segments[this.offset];\n }\n\n lastSegment(): string {\n return this.get(this.length - 1);\n }\n\n get(index: number): string {\n debugAssert(index < this.length, 'Index out of range');\n return this.segments[this.offset + index];\n }\n\n isEmpty(): boolean {\n return this.length === 0;\n }\n\n isPrefixOf(other: this): boolean {\n if (other.length < this.length) {\n return false;\n }\n\n for (let i = 0; i < this.length; i++) {\n if (this.get(i) !== other.get(i)) {\n return false;\n }\n }\n\n return true;\n }\n\n isImmediateParentOf(potentialChild: this): boolean {\n if (this.length + 1 !== potentialChild.length) {\n return false;\n }\n\n for (let i = 0; i < this.length; i++) {\n if (this.get(i) !== potentialChild.get(i)) {\n return false;\n }\n }\n\n return true;\n }\n\n forEach(fn: (segment: string) => void): void {\n for (let i = this.offset, end = this.limit(); i < end; i++) {\n fn(this.segments[i]);\n }\n }\n\n toArray(): string[] {\n return this.segments.slice(this.offset, this.limit());\n }\n\n static comparator<T extends BasePath<T>>(\n p1: BasePath<T>,\n p2: BasePath<T>\n ): number {\n const len = Math.min(p1.length, p2.length);\n for (let i = 0; i < len; i++) {\n const left = p1.get(i);\n const right = p2.get(i);\n if (left < right) {\n return -1;\n }\n if (left > right) {\n return 1;\n }\n }\n if (p1.length < p2.length) {\n return -1;\n }\n if (p1.length > p2.length) {\n return 1;\n }\n return 0;\n }\n}\n\n/**\n * A slash-separated path for navigating resources (documents and collections)\n * within Firestore.\n */\nexport class ResourcePath extends BasePath<ResourcePath> {\n protected construct(\n segments: string[],\n offset?: number,\n length?: number\n ): ResourcePath {\n return new ResourcePath(segments, offset, length);\n }\n\n canonicalString(): string {\n // NOTE: The client is ignorant of any path segments containing escape\n // sequences (e.g. __id123__) and just passes them through raw (they exist\n // for legacy reasons and should not be used frequently).\n\n return this.toArray().join('/');\n }\n\n toString(): string {\n return this.canonicalString();\n }\n\n /**\n * Creates a resource path from the given slash-delimited string.\n */\n static fromString(path: string): ResourcePath {\n // NOTE: The client is ignorant of any path segments containing escape\n // sequences (e.g. 
__id123__) and just passes them through raw (they exist\n // for legacy reasons and should not be used frequently).\n\n if (path.indexOf('//') >= 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid path (${path}). Paths must not contain // in them.`\n );\n }\n\n // We may still have an empty segment at the beginning or end if they had a\n // leading or trailing slash (which we allow).\n const segments = path.split('/').filter(segment => segment.length > 0);\n\n return new ResourcePath(segments);\n }\n\n static EMPTY_PATH = new ResourcePath([]);\n}\n\nconst identifierRegExp = /^[_a-zA-Z][_a-zA-Z0-9]*$/;\n\n/** A dot-separated path for navigating sub-objects within a document. */\nexport class FieldPath extends BasePath<FieldPath> {\n protected construct(\n segments: string[],\n offset?: number,\n length?: number\n ): FieldPath {\n return new FieldPath(segments, offset, length);\n }\n\n /**\n * Returns true if the string could be used as a segment in a field path\n * without escaping.\n */\n private static isValidIdentifier(segment: string): boolean {\n return identifierRegExp.test(segment);\n }\n\n canonicalString(): string {\n return this.toArray()\n .map(str => {\n str = str.replace('\\\\', '\\\\\\\\').replace('`', '\\\\`');\n if (!FieldPath.isValidIdentifier(str)) {\n str = '`' + str + '`';\n }\n return str;\n })\n .join('.');\n }\n\n toString(): string {\n return this.canonicalString();\n }\n\n /**\n * Returns true if this field references the key of a document.\n */\n isKeyField(): boolean {\n return this.length === 1 && this.get(0) === DOCUMENT_KEY_NAME;\n }\n\n /**\n * The field designating the key of a document.\n */\n static keyField(): FieldPath {\n return new FieldPath([DOCUMENT_KEY_NAME]);\n }\n\n /**\n * Parses a field string from the given server-formatted string.\n *\n * - Splitting the empty string is not allowed (for now at least).\n * - Empty segments within the string (e.g. if there are two consecutive\n * separators) are not allowed.\n *\n * TODO(b/37244157): we should make this more strict. Right now, it allows\n * non-identifier path components, even if they aren't escaped.\n */\n static fromServerFormat(path: string): FieldPath {\n const segments: string[] = [];\n let current = '';\n let i = 0;\n\n const addCurrentSegment = (): void => {\n if (current.length === 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid field path (${path}). Paths must not be empty, begin ` +\n `with '.', end with '.', or contain '..'`\n );\n }\n segments.push(current);\n current = '';\n };\n\n let inBackticks = false;\n\n while (i < path.length) {\n const c = path[i];\n if (c === '\\\\') {\n if (i + 1 === path.length) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Path has trailing escape character: ' + path\n );\n }\n const next = path[i + 1];\n if (!(next === '\\\\' || next === '.' || next === '`')) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Path has invalid escape sequence: ' + path\n );\n }\n current += next;\n i += 2;\n } else if (c === '`') {\n inBackticks = !inBackticks;\n i++;\n } else if (c === '.' 
&& !inBackticks) {\n addCurrentSegment();\n i++;\n } else {\n current += c;\n i++;\n }\n }\n addCurrentSegment();\n\n if (inBackticks) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Unterminated ` in path: ' + path\n );\n }\n\n return new FieldPath(segments);\n }\n\n static EMPTY_PATH = new FieldPath([]);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert } from '../util/assert';\n\nimport { ResourcePath } from './path';\n\nexport class DocumentKey {\n constructor(readonly path: ResourcePath) {\n debugAssert(\n DocumentKey.isDocumentKey(path),\n 'Invalid DocumentKey with an odd number of segments: ' +\n path.toArray().join('/')\n );\n }\n\n static fromName(name: string): DocumentKey {\n return new DocumentKey(ResourcePath.fromString(name).popFirst(5));\n }\n\n /** Returns true if the document is in the specified collectionId. */\n hasCollectionId(collectionId: string): boolean {\n return (\n this.path.length >= 2 &&\n this.path.get(this.path.length - 2) === collectionId\n );\n }\n\n isEqual(other: DocumentKey | null): boolean {\n return (\n other !== null && ResourcePath.comparator(this.path, other.path) === 0\n );\n }\n\n toString(): string {\n return this.path.toString();\n }\n\n static EMPTY = new DocumentKey(new ResourcePath([]));\n\n static comparator(k1: DocumentKey, k2: DocumentKey): number {\n return ResourcePath.comparator(k1.path, k2.path);\n }\n\n static isDocumentKey(path: ResourcePath): boolean {\n return path.length % 2 === 0;\n }\n\n /**\n * Creates and returns a new document key with the given segments.\n *\n * @param segments The segments of the path to the document\n * @return A new instance of DocumentKey\n */\n static fromSegments(segments: string[]): DocumentKey {\n return new DocumentKey(new ResourcePath(segments.slice()));\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert } from './assert';\n\nexport interface Dict<V> {\n [stringKey: string]: V;\n}\n\nexport function objectSize<V>(obj: object): number {\n let count = 0;\n for (const key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n count++;\n }\n }\n return count;\n}\n\nexport function forEach<V>(\n obj: Dict<V>,\n fn: (key: string, val: V) => void\n): void {\n for (const key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n fn(key, obj[key]);\n }\n }\n}\n\nexport function 
isEmpty<V>(obj: Dict<V>): boolean {\n debugAssert(\n obj != null && typeof obj === 'object',\n 'isEmpty() expects object parameter.'\n );\n for (const key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n return false;\n }\n }\n return true;\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PlatformSupport } from '../platform/platform';\nimport { primitiveComparator } from './misc';\n\n/**\n * Immutable class that represents a \"proto\" byte string.\n *\n * Proto byte strings can either be Base64-encoded strings or Uint8Arrays when\n * sent on the wire. This class abstracts away this differentiation by holding\n * the proto byte string in a common class that must be converted into a string\n * before being sent as a proto.\n */\nexport class ByteString {\n static readonly EMPTY_BYTE_STRING = new ByteString('');\n\n private constructor(private readonly binaryString: string) {}\n\n static fromBase64String(base64: string): ByteString {\n const binaryString = PlatformSupport.getPlatform().atob(base64);\n return new ByteString(binaryString);\n }\n\n static fromUint8Array(array: Uint8Array): ByteString {\n const binaryString = binaryStringFromUint8Array(array);\n return new ByteString(binaryString);\n }\n\n toBase64(): string {\n return PlatformSupport.getPlatform().btoa(this.binaryString);\n }\n\n toUint8Array(): Uint8Array {\n return uint8ArrayFromBinaryString(this.binaryString);\n }\n\n approximateByteSize(): number {\n return this.binaryString.length * 2;\n }\n\n compareTo(other: ByteString): number {\n return primitiveComparator(this.binaryString, other.binaryString);\n }\n\n isEqual(other: ByteString): boolean {\n return this.binaryString === other.binaryString;\n }\n}\n\n/**\n * Helper function to convert an Uint8array to a binary string.\n */\nexport function binaryStringFromUint8Array(array: Uint8Array): string {\n let binaryString = '';\n for (let i = 0; i < array.length; ++i) {\n binaryString += String.fromCharCode(array[i]);\n }\n return binaryString;\n}\n\n/**\n * Helper function to convert a binary string to an Uint8Array.\n */\nexport function uint8ArrayFromBinaryString(binaryString: string): Uint8Array {\n const buffer = new Uint8Array(binaryString.length);\n for (let i = 0; i < binaryString.length; i++) {\n buffer[i] = binaryString.charCodeAt(i);\n }\n return buffer;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n 
*/\n\n// An Object whose keys and values are strings.\nexport interface StringMap {\n [key: string]: string;\n}\n\n/**\n * Returns whether a variable is either undefined or null.\n */\nexport function isNullOrUndefined(value: unknown): value is null | undefined {\n return value === null || value === undefined;\n}\n\n/** Returns whether the value represents -0. */\nexport function isNegativeZero(value: number): boolean {\n // Detect if the value is -0.0. Based on polyfill from\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is\n return value === -0 && 1 / value === 1 / -0;\n}\n\n/**\n * Returns whether a value is an integer and in the safe integer range\n * @param value The value to test for being an integer and in the safe range\n */\nexport function isSafeInteger(value: unknown): boolean {\n return (\n typeof value === 'number' &&\n Number.isInteger(value) &&\n !isNegativeZero(value) &&\n value <= Number.MAX_SAFE_INTEGER &&\n value >= Number.MIN_SAFE_INTEGER\n );\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\nimport { Timestamp } from '../api/timestamp';\nimport { normalizeTimestamp } from './values';\n\n/**\n * Represents a locally-applied ServerTimestamp.\n *\n * Server Timestamps are backed by MapValues that contain an internal field\n * `__type__` with a value of `server_timestamp`. The previous value and local\n * write time are stored in its `__previous_value__` and `__local_write_time__`\n * fields respectively.\n *\n * Notes:\n * - ServerTimestampValue instances are created as the result of applying a\n * TransformMutation (see TransformMutation.applyTo()). They can only exist in\n * the local view of a document. Therefore they do not need to be parsed or\n * serialized.\n * - When evaluated locally (e.g. for snapshot.data()), they by default\n * evaluate to `null`. 
This behavior can be configured by passing custom\n * FieldValueOptions to value().\n * - With respect to other ServerTimestampValues, they sort by their\n * localWriteTime.\n */\n\nconst SERVER_TIMESTAMP_SENTINEL = 'server_timestamp';\nconst TYPE_KEY = '__type__';\nconst PREVIOUS_VALUE_KEY = '__previous_value__';\nconst LOCAL_WRITE_TIME_KEY = '__local_write_time__';\n\nexport function isServerTimestamp(value: api.Value | null): boolean {\n const type = (value?.mapValue?.fields || {})[TYPE_KEY]?.stringValue;\n return type === SERVER_TIMESTAMP_SENTINEL;\n}\n\n/**\n * Creates a new ServerTimestamp proto value (using the internal format).\n */\nexport function serverTimestamp(\n localWriteTime: Timestamp,\n previousValue: api.Value | null\n): api.Value {\n const mapValue: api.MapValue = {\n fields: {\n [TYPE_KEY]: {\n stringValue: SERVER_TIMESTAMP_SENTINEL\n },\n [LOCAL_WRITE_TIME_KEY]: {\n timestampValue: {\n seconds: localWriteTime.seconds,\n nanos: localWriteTime.nanoseconds\n }\n }\n }\n };\n\n if (previousValue) {\n mapValue.fields![PREVIOUS_VALUE_KEY] = previousValue;\n }\n\n return { mapValue };\n}\n\n/**\n * Returns the value of the field before this ServerTimestamp was set.\n *\n * Preserving the previous values allows the user to display the last resoled\n * value until the backend responds with the timestamp.\n */\nexport function getPreviousValue(value: api.Value): api.Value | null {\n const previousValue = value.mapValue!.fields![PREVIOUS_VALUE_KEY];\n\n if (isServerTimestamp(previousValue)) {\n return getPreviousValue(previousValue);\n }\n return previousValue;\n}\n\n/**\n * Returns the local time at which this timestamp was first set.\n */\nexport function getLocalWriteTime(value: api.Value): Timestamp {\n const localWriteTime = normalizeTimestamp(\n value.mapValue!.fields![LOCAL_WRITE_TIME_KEY].timestampValue!\n );\n return new Timestamp(localWriteTime.seconds, localWriteTime.nanos);\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { TypeOrder } from './object_value';\nimport { fail, hardAssert } from '../util/assert';\nimport { forEach, objectSize } from '../util/obj';\nimport { ByteString } from '../util/byte_string';\nimport { isNegativeZero } from '../util/types';\nimport { DocumentKey } from './document_key';\nimport { arrayEquals, primitiveComparator } from '../util/misc';\nimport { DatabaseId } from '../core/database_info';\nimport {\n getLocalWriteTime,\n getPreviousValue,\n isServerTimestamp\n} from './server_timestamps';\n\n// A RegExp matching ISO 8601 UTC timestamps with optional fraction.\nconst ISO_TIMESTAMP_REG_EXP = new RegExp(\n /^\\d{4}-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(?:\\.(\\d+))?Z$/\n);\n\n/** Extracts the backend's type order for the provided value. 
*/\nexport function typeOrder(value: api.Value): TypeOrder {\n if ('nullValue' in value) {\n return TypeOrder.NullValue;\n } else if ('booleanValue' in value) {\n return TypeOrder.BooleanValue;\n } else if ('integerValue' in value || 'doubleValue' in value) {\n return TypeOrder.NumberValue;\n } else if ('timestampValue' in value) {\n return TypeOrder.TimestampValue;\n } else if ('stringValue' in value) {\n return TypeOrder.StringValue;\n } else if ('bytesValue' in value) {\n return TypeOrder.BlobValue;\n } else if ('referenceValue' in value) {\n return TypeOrder.RefValue;\n } else if ('geoPointValue' in value) {\n return TypeOrder.GeoPointValue;\n } else if ('arrayValue' in value) {\n return TypeOrder.ArrayValue;\n } else if ('mapValue' in value) {\n if (isServerTimestamp(value)) {\n return TypeOrder.ServerTimestampValue;\n }\n return TypeOrder.ObjectValue;\n } else {\n return fail('Invalid value type: ' + JSON.stringify(value));\n }\n}\n\n/** Tests `left` and `right` for equality based on the backend semantics. */\nexport function valueEquals(left: api.Value, right: api.Value): boolean {\n const leftType = typeOrder(left);\n const rightType = typeOrder(right);\n if (leftType !== rightType) {\n return false;\n }\n\n switch (leftType) {\n case TypeOrder.NullValue:\n return true;\n case TypeOrder.BooleanValue:\n return left.booleanValue === right.booleanValue;\n case TypeOrder.ServerTimestampValue:\n return getLocalWriteTime(left).isEqual(getLocalWriteTime(right));\n case TypeOrder.TimestampValue:\n return timestampEquals(left, right);\n case TypeOrder.StringValue:\n return left.stringValue === right.stringValue;\n case TypeOrder.BlobValue:\n return blobEquals(left, right);\n case TypeOrder.RefValue:\n return left.referenceValue === right.referenceValue;\n case TypeOrder.GeoPointValue:\n return geoPointEquals(left, right);\n case TypeOrder.NumberValue:\n return numberEquals(left, right);\n case TypeOrder.ArrayValue:\n return arrayEquals(\n left.arrayValue!.values || [],\n right.arrayValue!.values || [],\n valueEquals\n );\n case TypeOrder.ObjectValue:\n return objectEquals(left, right);\n default:\n return fail('Unexpected value type: ' + JSON.stringify(left));\n }\n}\n\nfunction timestampEquals(left: api.Value, right: api.Value): boolean {\n if (\n typeof left.timestampValue === 'string' &&\n typeof right.timestampValue === 'string' &&\n left.timestampValue.length === right.timestampValue.length\n ) {\n // Use string equality for ISO 8601 timestamps\n return left.timestampValue === right.timestampValue;\n }\n\n const leftTimestamp = normalizeTimestamp(left.timestampValue!);\n const rightTimestamp = normalizeTimestamp(right.timestampValue!);\n return (\n leftTimestamp.seconds === rightTimestamp.seconds &&\n leftTimestamp.nanos === rightTimestamp.nanos\n );\n}\n\nfunction geoPointEquals(left: api.Value, right: api.Value): boolean {\n return (\n normalizeNumber(left.geoPointValue!.latitude) ===\n normalizeNumber(right.geoPointValue!.latitude) &&\n normalizeNumber(left.geoPointValue!.longitude) ===\n normalizeNumber(right.geoPointValue!.longitude)\n );\n}\n\nfunction blobEquals(left: api.Value, right: api.Value): boolean {\n return normalizeByteString(left.bytesValue!).isEqual(\n normalizeByteString(right.bytesValue!)\n );\n}\n\nexport function numberEquals(left: api.Value, right: api.Value): boolean {\n if ('integerValue' in left && 'integerValue' in right) {\n return (\n normalizeNumber(left.integerValue) === normalizeNumber(right.integerValue)\n );\n } else if ('doubleValue' in left && 
'doubleValue' in right) {\n const n1 = normalizeNumber(left.doubleValue!);\n const n2 = normalizeNumber(right.doubleValue!);\n\n if (n1 === n2) {\n return isNegativeZero(n1) === isNegativeZero(n2);\n } else {\n return isNaN(n1) && isNaN(n2);\n }\n }\n\n return false;\n}\n\nfunction objectEquals(left: api.Value, right: api.Value): boolean {\n const leftMap = left.mapValue!.fields || {};\n const rightMap = right.mapValue!.fields || {};\n\n if (objectSize(leftMap) !== objectSize(rightMap)) {\n return false;\n }\n\n for (const key in leftMap) {\n if (leftMap.hasOwnProperty(key)) {\n if (\n rightMap[key] === undefined ||\n !valueEquals(leftMap[key], rightMap[key])\n ) {\n return false;\n }\n }\n }\n return true;\n}\n\n/** Returns true if the ArrayValue contains the specified element. */\nexport function arrayValueContains(\n haystack: api.ArrayValue,\n needle: api.Value\n): boolean {\n return (\n (haystack.values || []).find(v => valueEquals(v, needle)) !== undefined\n );\n}\n\nexport function valueCompare(left: api.Value, right: api.Value): number {\n const leftType = typeOrder(left);\n const rightType = typeOrder(right);\n\n if (leftType !== rightType) {\n return primitiveComparator(leftType, rightType);\n }\n\n switch (leftType) {\n case TypeOrder.NullValue:\n return 0;\n case TypeOrder.BooleanValue:\n return primitiveComparator(left.booleanValue!, right.booleanValue!);\n case TypeOrder.NumberValue:\n return compareNumbers(left, right);\n case TypeOrder.TimestampValue:\n return compareTimestamps(left.timestampValue!, right.timestampValue!);\n case TypeOrder.ServerTimestampValue:\n return compareTimestamps(\n getLocalWriteTime(left),\n getLocalWriteTime(right)\n );\n case TypeOrder.StringValue:\n return primitiveComparator(left.stringValue!, right.stringValue!);\n case TypeOrder.BlobValue:\n return compareBlobs(left.bytesValue!, right.bytesValue!);\n case TypeOrder.RefValue:\n return compareReferences(left.referenceValue!, right.referenceValue!);\n case TypeOrder.GeoPointValue:\n return compareGeoPoints(left.geoPointValue!, right.geoPointValue!);\n case TypeOrder.ArrayValue:\n return compareArrays(left.arrayValue!, right.arrayValue!);\n case TypeOrder.ObjectValue:\n return compareMaps(left.mapValue!, right.mapValue!);\n default:\n throw fail('Invalid value type: ' + leftType);\n }\n}\n\nfunction compareNumbers(left: api.Value, right: api.Value): number {\n const leftNumber = normalizeNumber(left.integerValue || left.doubleValue);\n const rightNumber = normalizeNumber(right.integerValue || right.doubleValue);\n\n if (leftNumber < rightNumber) {\n return -1;\n } else if (leftNumber > rightNumber) {\n return 1;\n } else if (leftNumber === rightNumber) {\n return 0;\n } else {\n // one or both are NaN.\n if (isNaN(leftNumber)) {\n return isNaN(rightNumber) ? 
0 : -1;\n } else {\n return 1;\n }\n }\n}\n\nfunction compareTimestamps(left: api.Timestamp, right: api.Timestamp): number {\n if (\n typeof left === 'string' &&\n typeof right === 'string' &&\n left.length === right.length\n ) {\n return primitiveComparator(left, right);\n }\n\n const leftTimestamp = normalizeTimestamp(left);\n const rightTimestamp = normalizeTimestamp(right);\n\n const comparison = primitiveComparator(\n leftTimestamp.seconds,\n rightTimestamp.seconds\n );\n if (comparison !== 0) {\n return comparison;\n }\n return primitiveComparator(leftTimestamp.nanos, rightTimestamp.nanos);\n}\n\nfunction compareReferences(leftPath: string, rightPath: string): number {\n const leftSegments = leftPath.split('/');\n const rightSegments = rightPath.split('/');\n for (let i = 0; i < leftSegments.length && i < rightSegments.length; i++) {\n const comparison = primitiveComparator(leftSegments[i], rightSegments[i]);\n if (comparison !== 0) {\n return comparison;\n }\n }\n return primitiveComparator(leftSegments.length, rightSegments.length);\n}\n\nfunction compareGeoPoints(left: api.LatLng, right: api.LatLng): number {\n const comparison = primitiveComparator(\n normalizeNumber(left.latitude),\n normalizeNumber(right.latitude)\n );\n if (comparison !== 0) {\n return comparison;\n }\n return primitiveComparator(\n normalizeNumber(left.longitude),\n normalizeNumber(right.longitude)\n );\n}\n\nfunction compareBlobs(\n left: string | Uint8Array,\n right: string | Uint8Array\n): number {\n const leftBytes = normalizeByteString(left);\n const rightBytes = normalizeByteString(right);\n return leftBytes.compareTo(rightBytes);\n}\n\nfunction compareArrays(left: api.ArrayValue, right: api.ArrayValue): number {\n const leftArray = left.values || [];\n const rightArray = right.values || [];\n\n for (let i = 0; i < leftArray.length && i < rightArray.length; ++i) {\n const compare = valueCompare(leftArray[i], rightArray[i]);\n if (compare) {\n return compare;\n }\n }\n return primitiveComparator(leftArray.length, rightArray.length);\n}\n\nfunction compareMaps(left: api.MapValue, right: api.MapValue): number {\n const leftMap = left.fields || {};\n const leftKeys = Object.keys(leftMap);\n const rightMap = right.fields || {};\n const rightKeys = Object.keys(rightMap);\n\n // Even though MapValues are likely sorted correctly based on their insertion\n // order (e.g. when received from the backend), local modifications can bring\n // elements out of order. 
We need to re-sort the elements to ensure that\n // canonical IDs are independent of insertion order.\n leftKeys.sort();\n rightKeys.sort();\n\n for (let i = 0; i < leftKeys.length && i < rightKeys.length; ++i) {\n const keyCompare = primitiveComparator(leftKeys[i], rightKeys[i]);\n if (keyCompare !== 0) {\n return keyCompare;\n }\n const compare = valueCompare(leftMap[leftKeys[i]], rightMap[rightKeys[i]]);\n if (compare !== 0) {\n return compare;\n }\n }\n\n return primitiveComparator(leftKeys.length, rightKeys.length);\n}\n\n/**\n * Generates the canonical ID for the provided field value (as used in Target\n * serialization).\n */\nexport function canonicalId(value: api.Value): string {\n return canonifyValue(value);\n}\n\nfunction canonifyValue(value: api.Value): string {\n if ('nullValue' in value) {\n return 'null';\n } else if ('booleanValue' in value) {\n return '' + value.booleanValue!;\n } else if ('integerValue' in value) {\n return '' + value.integerValue!;\n } else if ('doubleValue' in value) {\n return '' + value.doubleValue!;\n } else if ('timestampValue' in value) {\n return canonifyTimestamp(value.timestampValue!);\n } else if ('stringValue' in value) {\n return value.stringValue!;\n } else if ('bytesValue' in value) {\n return canonifyByteString(value.bytesValue!);\n } else if ('referenceValue' in value) {\n return canonifyReference(value.referenceValue!);\n } else if ('geoPointValue' in value) {\n return canonifyGeoPoint(value.geoPointValue!);\n } else if ('arrayValue' in value) {\n return canonifyArray(value.arrayValue!);\n } else if ('mapValue' in value) {\n return canonifyMap(value.mapValue!);\n } else {\n return fail('Invalid value type: ' + JSON.stringify(value));\n }\n}\n\nfunction canonifyByteString(byteString: string | Uint8Array): string {\n return normalizeByteString(byteString).toBase64();\n}\n\nfunction canonifyTimestamp(timestamp: api.Timestamp): string {\n const normalizedTimestamp = normalizeTimestamp(timestamp);\n return `time(${normalizedTimestamp.seconds},${normalizedTimestamp.nanos})`;\n}\n\nfunction canonifyGeoPoint(geoPoint: api.LatLng): string {\n return `geo(${geoPoint.latitude},${geoPoint.longitude})`;\n}\n\nfunction canonifyReference(referenceValue: string): string {\n return DocumentKey.fromName(referenceValue).toString();\n}\n\nfunction canonifyMap(mapValue: api.MapValue): string {\n // Iteration order in JavaScript is not guaranteed. 
To ensure that we generate\n // matching canonical IDs for identical maps, we need to sort the keys.\n const sortedKeys = Object.keys(mapValue.fields || {}).sort();\n\n let result = '{';\n let first = true;\n for (const key of sortedKeys) {\n if (!first) {\n result += ',';\n } else {\n first = false;\n }\n result += `${key}:${canonifyValue(mapValue.fields![key])}`;\n }\n return result + '}';\n}\n\nfunction canonifyArray(arrayValue: api.ArrayValue): string {\n let result = '[';\n let first = true;\n for (const value of arrayValue.values || []) {\n if (!first) {\n result += ',';\n } else {\n first = false;\n }\n result += canonifyValue(value);\n }\n return result + ']';\n}\n\n/**\n * Returns an approximate (and wildly inaccurate) in-memory size for the field\n * value.\n *\n * The memory size takes into account only the actual user data as it resides\n * in memory and ignores object overhead.\n */\nexport function estimateByteSize(value: api.Value): number {\n switch (typeOrder(value)) {\n case TypeOrder.NullValue:\n return 4;\n case TypeOrder.BooleanValue:\n return 4;\n case TypeOrder.NumberValue:\n return 8;\n case TypeOrder.TimestampValue:\n // Timestamps are made up of two distinct numbers (seconds + nanoseconds)\n return 16;\n case TypeOrder.ServerTimestampValue:\n const previousValue = getPreviousValue(value);\n return previousValue ? 16 + estimateByteSize(previousValue) : 16;\n case TypeOrder.StringValue:\n // See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures:\n // \"JavaScript's String type is [...] a set of elements of 16-bit unsigned\n // integer values\"\n return value.stringValue!.length * 2;\n case TypeOrder.BlobValue:\n return normalizeByteString(value.bytesValue!).approximateByteSize();\n case TypeOrder.RefValue:\n return value.referenceValue!.length;\n case TypeOrder.GeoPointValue:\n // GeoPoints are made up of two distinct numbers (latitude + longitude)\n return 16;\n case TypeOrder.ArrayValue:\n return estimateArrayByteSize(value.arrayValue!);\n case TypeOrder.ObjectValue:\n return estimateMapByteSize(value.mapValue!);\n default:\n throw fail('Invalid value type: ' + JSON.stringify(value));\n }\n}\n\nfunction estimateMapByteSize(mapValue: api.MapValue): number {\n let size = 0;\n forEach(mapValue.fields || {}, (key, val) => {\n size += key.length + estimateByteSize(val);\n });\n return size;\n}\n\nfunction estimateArrayByteSize(arrayValue: api.ArrayValue): number {\n return (arrayValue.values || []).reduce(\n (previousSize, value) => previousSize + estimateByteSize(value),\n 0\n );\n}\n\n/**\n * Converts the possible Proto values for a timestamp value into a \"seconds and\n * nanos\" representation.\n */\nexport function normalizeTimestamp(\n date: api.Timestamp\n): { seconds: number; nanos: number } {\n hardAssert(!!date, 'Cannot normalize null or undefined timestamp.');\n\n // The json interface (for the browser) will return an iso timestamp string,\n // while the proto js library (for node) will return a\n // google.protobuf.Timestamp instance.\n if (typeof date === 'string') {\n // The date string can have higher precision (nanos) than the Date class\n // (millis), so we do some custom parsing here.\n\n // Parse the nanos right out of the string.\n let nanos = 0;\n const fraction = ISO_TIMESTAMP_REG_EXP.exec(date);\n hardAssert(!!fraction, 'invalid timestamp: ' + date);\n if (fraction[1]) {\n // Pad the fraction out to 9 digits (nanos).\n let nanoStr = fraction[1];\n nanoStr = (nanoStr + '000000000').substr(0, 9);\n nanos = Number(nanoStr);\n 
}\n\n // Parse the date to get the seconds.\n const parsedDate = new Date(date);\n const seconds = Math.floor(parsedDate.getTime() / 1000);\n\n return { seconds, nanos };\n } else {\n // TODO(b/37282237): Use strings for Proto3 timestamps\n // assert(!this.options.useProto3Json,\n // 'The timestamp instance format requires Proto JS.');\n const seconds = normalizeNumber(date.seconds);\n const nanos = normalizeNumber(date.nanos);\n return { seconds, nanos };\n }\n}\n\n/**\n * Converts the possible Proto types for numbers into a JavaScript number.\n * Returns 0 if the value is not numeric.\n */\nexport function normalizeNumber(value: number | string | undefined): number {\n // TODO(bjornick): Handle int64 greater than 53 bits.\n if (typeof value === 'number') {\n return value;\n } else if (typeof value === 'string') {\n return Number(value);\n } else {\n return 0;\n }\n}\n\n/** Converts the possible Proto types for Blobs into a ByteString. */\nexport function normalizeByteString(blob: string | Uint8Array): ByteString {\n if (typeof blob === 'string') {\n return ByteString.fromBase64String(blob);\n } else {\n return ByteString.fromUint8Array(blob);\n }\n}\n\n/** Returns a reference value for the provided database and key. */\nexport function refValue(databaseId: DatabaseId, key: DocumentKey): api.Value {\n return {\n referenceValue: `projects/${databaseId.projectId}/databases/${\n databaseId.database\n }/documents/${key.path.canonicalString()}`\n };\n}\n\n/** Returns true if `value` is an IntegerValue . */\nexport function isInteger(\n value?: api.Value | null\n): value is { integerValue: string | number } {\n return !!value && 'integerValue' in value;\n}\n\n/** Returns true if `value` is a DoubleValue. */\nexport function isDouble(\n value?: api.Value | null\n): value is { doubleValue: string | number } {\n return !!value && 'doubleValue' in value;\n}\n\n/** Returns true if `value` is either an IntegerValue or a DoubleValue. */\nexport function isNumber(value?: api.Value | null): boolean {\n return isInteger(value) || isDouble(value);\n}\n\n/** Returns true if `value` is an ArrayValue. */\nexport function isArray(\n value?: api.Value | null\n): value is { arrayValue: api.ArrayValue } {\n return !!value && 'arrayValue' in value;\n}\n\n/** Returns true if `value` is a ReferenceValue. */\nexport function isReferenceValue(\n value?: api.Value | null\n): value is { referenceValue: string } {\n return !!value && 'referenceValue' in value;\n}\n\n/** Returns true if `value` is a NullValue. */\nexport function isNullValue(\n value?: api.Value | null\n): value is { nullValue: 'NULL_VALUE' } {\n return !!value && 'nullValue' in value;\n}\n\n/** Returns true if `value` is NaN. */\nexport function isNanValue(\n value?: api.Value | null\n): value is { doubleValue: 'NaN' | number } {\n return !!value && 'doubleValue' in value && isNaN(Number(value.doubleValue));\n}\n\n/** Returns true if `value` is a MapValue. 
*/\nexport function isMapValue(\n value?: api.Value | null\n): value is { mapValue: api.MapValue } {\n return !!value && 'mapValue' in value;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { Timestamp } from '../api/timestamp';\nimport { debugAssert } from '../util/assert';\nimport { JsonProtoSerializer } from '../remote/serializer';\nimport {\n valueEquals,\n isArray,\n isInteger,\n isNumber,\n normalizeNumber\n} from './values';\nimport { serverTimestamp } from './server_timestamps';\nimport { arrayEquals } from '../util/misc';\n\n/** Represents a transform within a TransformMutation. */\nexport interface TransformOperation {\n /**\n * Computes the local transform result against the provided `previousValue`,\n * optionally using the provided localWriteTime.\n */\n applyToLocalView(\n previousValue: api.Value | null,\n localWriteTime: Timestamp\n ): api.Value;\n\n /**\n * Computes a final transform result after the transform has been acknowledged\n * by the server, potentially using the server-provided transformResult.\n */\n applyToRemoteDocument(\n previousValue: api.Value | null,\n transformResult: api.Value | null\n ): api.Value;\n\n /**\n * If this transform operation is not idempotent, returns the base value to\n * persist for this transform. If a base value is returned, the transform\n * operation is always applied to this base value, even if document has\n * already been updated.\n *\n * Base values provide consistent behavior for non-idempotent transforms and\n * allow us to return the same latency-compensated value even if the backend\n * has already applied the transform operation. The base value is null for\n * idempotent transforms, as they can be re-played even if the backend has\n * already applied them.\n *\n * @return a base value to store along with the mutation, or null for\n * idempotent transforms.\n */\n computeBaseValue(previousValue: api.Value | null): api.Value | null;\n\n isEqual(other: TransformOperation): boolean;\n}\n\n/** Transforms a value into a server-generated timestamp. */\nexport class ServerTimestampTransform implements TransformOperation {\n private constructor() {}\n static instance = new ServerTimestampTransform();\n\n applyToLocalView(\n previousValue: api.Value | null,\n localWriteTime: Timestamp\n ): api.Value {\n return serverTimestamp(localWriteTime!, previousValue);\n }\n\n applyToRemoteDocument(\n previousValue: api.Value | null,\n transformResult: api.Value | null\n ): api.Value {\n return transformResult!;\n }\n\n computeBaseValue(previousValue: api.Value | null): api.Value | null {\n return null; // Server timestamps are idempotent and don't require a base value.\n }\n\n isEqual(other: TransformOperation): boolean {\n return other instanceof ServerTimestampTransform;\n }\n}\n\n/** Transforms an array value via a union operation. 
*/\nexport class ArrayUnionTransformOperation implements TransformOperation {\n constructor(readonly elements: api.Value[]) {}\n\n applyToLocalView(\n previousValue: api.Value | null,\n localWriteTime: Timestamp\n ): api.Value {\n return this.apply(previousValue);\n }\n\n applyToRemoteDocument(\n previousValue: api.Value | null,\n transformResult: api.Value | null\n ): api.Value {\n // The server just sends null as the transform result for array operations,\n // so we have to calculate a result the same as we do for local\n // applications.\n return this.apply(previousValue);\n }\n\n private apply(previousValue: api.Value | null): api.Value {\n const values = coercedFieldValuesArray(previousValue);\n for (const toUnion of this.elements) {\n if (!values.some(element => valueEquals(element, toUnion))) {\n values.push(toUnion);\n }\n }\n return { arrayValue: { values } };\n }\n\n computeBaseValue(previousValue: api.Value | null): api.Value | null {\n return null; // Array transforms are idempotent and don't require a base value.\n }\n\n isEqual(other: TransformOperation): boolean {\n return (\n other instanceof ArrayUnionTransformOperation &&\n arrayEquals(this.elements, other.elements, valueEquals)\n );\n }\n}\n\n/** Transforms an array value via a remove operation. */\nexport class ArrayRemoveTransformOperation implements TransformOperation {\n constructor(readonly elements: api.Value[]) {}\n\n applyToLocalView(\n previousValue: api.Value | null,\n localWriteTime: Timestamp\n ): api.Value {\n return this.apply(previousValue);\n }\n\n applyToRemoteDocument(\n previousValue: api.Value | null,\n transformResult: api.Value | null\n ): api.Value {\n // The server just sends null as the transform result for array operations,\n // so we have to calculate a result the same as we do for local\n // applications.\n return this.apply(previousValue);\n }\n\n private apply(previousValue: api.Value | null): api.Value {\n let values = coercedFieldValuesArray(previousValue);\n for (const toRemove of this.elements) {\n values = values.filter(element => !valueEquals(element, toRemove));\n }\n return { arrayValue: { values } };\n }\n\n computeBaseValue(previousValue: api.Value | null): api.Value | null {\n return null; // Array transforms are idempotent and don't require a base value.\n }\n\n isEqual(other: TransformOperation): boolean {\n return (\n other instanceof ArrayRemoveTransformOperation &&\n arrayEquals(this.elements, other.elements, valueEquals)\n );\n }\n}\n\n/**\n * Implements the backend semantics for locally computed NUMERIC_ADD (increment)\n * transforms. Converts all field values to integers or doubles, but unlike the\n * backend does not cap integer values at 2^63. 
Instead, JavaScript number\n * arithmetic is used and precision loss can occur for values greater than 2^53.\n */\nexport class NumericIncrementTransformOperation implements TransformOperation {\n constructor(\n private readonly serializer: JsonProtoSerializer,\n readonly operand: api.Value\n ) {\n debugAssert(\n isNumber(operand),\n 'NumericIncrementTransform transform requires a NumberValue'\n );\n }\n\n applyToLocalView(\n previousValue: api.Value | null,\n localWriteTime: Timestamp\n ): api.Value {\n // PORTING NOTE: Since JavaScript's integer arithmetic is limited to 53 bit\n // precision and resolves overflows by reducing precision, we do not\n // manually cap overflows at 2^63.\n const baseValue = this.computeBaseValue(previousValue);\n const sum = this.asNumber(baseValue) + this.asNumber(this.operand);\n if (isInteger(baseValue) && isInteger(this.operand)) {\n return this.serializer.toInteger(sum);\n } else {\n return this.serializer.toDouble(sum);\n }\n }\n\n applyToRemoteDocument(\n previousValue: api.Value | null,\n transformResult: api.Value | null\n ): api.Value {\n debugAssert(\n transformResult !== null,\n \"Didn't receive transformResult for NUMERIC_ADD transform\"\n );\n return transformResult;\n }\n\n /**\n * Inspects the provided value, returning the provided value if it is already\n * a NumberValue, otherwise returning a coerced value of 0.\n */\n computeBaseValue(previousValue: api.Value | null): api.Value {\n return isNumber(previousValue) ? previousValue! : { integerValue: 0 };\n }\n\n isEqual(other: TransformOperation): boolean {\n return (\n other instanceof NumericIncrementTransformOperation &&\n valueEquals(this.operand, other.operand)\n );\n }\n\n private asNumber(value: api.Value): number {\n return normalizeNumber(value.integerValue || value.doubleValue);\n }\n}\n\nfunction coercedFieldValuesArray(value: api.Value | null): api.Value[] {\n return isArray(value) && value.arrayValue.values\n ? value.arrayValue.values.slice()\n : [];\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { Timestamp } from '../api/timestamp';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { debugAssert, fail, hardAssert } from '../util/assert';\n\nimport {\n Document,\n MaybeDocument,\n NoDocument,\n UnknownDocument\n} from './document';\nimport { DocumentKey } from './document_key';\nimport { ObjectValue, ObjectValueBuilder } from './object_value';\nimport { FieldPath } from './path';\nimport { TransformOperation } from './transform_operation';\nimport { arrayEquals } from '../util/misc';\n\n/**\n * Provides a set of fields that can be used to partially patch a document.\n * FieldMask is used in conjunction with ObjectValue.\n * Examples:\n * foo - Overwrites foo entirely with the provided value. 
If foo is not\n * present in the companion ObjectValue, the field is deleted.\n * foo.bar - Overwrites only the field bar of the object foo.\n * If foo is not an object, foo is replaced with an object\n * containing foo\n */\nexport class FieldMask {\n constructor(readonly fields: FieldPath[]) {\n // TODO(dimond): validation of FieldMask\n // Sort the field mask to support `FieldMask.isEqual()` and assert below.\n fields.sort(FieldPath.comparator);\n debugAssert(\n !fields.some((v, i) => i !== 0 && v.isEqual(fields[i - 1])),\n 'FieldMask contains field that is not unique: ' +\n fields.find((v, i) => i !== 0 && v.isEqual(fields[i - 1]))!\n );\n }\n\n /**\n * Verifies that `fieldPath` is included by at least one field in this field\n * mask.\n *\n * This is an O(n) operation, where `n` is the size of the field mask.\n */\n covers(fieldPath: FieldPath): boolean {\n for (const fieldMaskPath of this.fields) {\n if (fieldMaskPath.isPrefixOf(fieldPath)) {\n return true;\n }\n }\n return false;\n }\n\n isEqual(other: FieldMask): boolean {\n return arrayEquals(this.fields, other.fields, (l, r) => l.isEqual(r));\n }\n}\n\n/** A field path and the TransformOperation to perform upon it. */\nexport class FieldTransform {\n constructor(\n readonly field: FieldPath,\n readonly transform: TransformOperation\n ) {}\n\n isEqual(other: FieldTransform): boolean {\n return (\n this.field.isEqual(other.field) && this.transform.isEqual(other.transform)\n );\n }\n}\n\n/** The result of successfully applying a mutation to the backend. */\nexport class MutationResult {\n constructor(\n /**\n * The version at which the mutation was committed:\n *\n * - For most operations, this is the updateTime in the WriteResult.\n * - For deletes, the commitTime of the WriteResponse (because deletes are\n * not stored and have no updateTime).\n *\n * Note that these versions can be different: No-op writes will not change\n * the updateTime even though the commitTime advances.\n */\n readonly version: SnapshotVersion,\n /**\n * The resulting fields returned from the backend after a\n * TransformMutation has been committed. Contains one FieldValue for each\n * FieldTransform that was in the mutation.\n *\n * Will be null if the mutation was not a TransformMutation.\n */\n readonly transformResults: Array<api.Value | null> | null\n ) {}\n}\n\nexport const enum MutationType {\n Set,\n Patch,\n Transform,\n Delete,\n Verify\n}\n\n/**\n * Encodes a precondition for a mutation. This follows the model that the\n * backend accepts with the special case of an explicit \"empty\" precondition\n * (meaning no precondition).\n */\nexport class Precondition {\n private constructor(\n readonly updateTime?: SnapshotVersion,\n readonly exists?: boolean\n ) {\n debugAssert(\n updateTime === undefined || exists === undefined,\n 'Precondition can specify \"exists\" or \"updateTime\" but not both'\n );\n }\n\n /** Creates a new empty Precondition. */\n static none(): Precondition {\n return new Precondition();\n }\n\n /** Creates a new Precondition with an exists flag. */\n static exists(exists: boolean): Precondition {\n return new Precondition(undefined, exists);\n }\n\n /** Creates a new Precondition based on a version a document exists at. */\n static updateTime(version: SnapshotVersion): Precondition {\n return new Precondition(version);\n }\n\n /** Returns whether this Precondition is empty. 
*/\n get isNone(): boolean {\n return this.updateTime === undefined && this.exists === undefined;\n }\n\n /**\n * Returns true if the preconditions is valid for the given document\n * (or null if no document is available).\n */\n isValidFor(maybeDoc: MaybeDocument | null): boolean {\n if (this.updateTime !== undefined) {\n return (\n maybeDoc instanceof Document &&\n maybeDoc.version.isEqual(this.updateTime)\n );\n } else if (this.exists !== undefined) {\n return this.exists === maybeDoc instanceof Document;\n } else {\n debugAssert(this.isNone, 'Precondition should be empty');\n return true;\n }\n }\n\n isEqual(other: Precondition): boolean {\n return (\n this.exists === other.exists &&\n (this.updateTime\n ? !!other.updateTime && this.updateTime.isEqual(other.updateTime)\n : !other.updateTime)\n );\n }\n}\n\n/**\n * A mutation describes a self-contained change to a document. Mutations can\n * create, replace, delete, and update subsets of documents.\n *\n * Mutations not only act on the value of the document but also its version.\n *\n * For local mutations (mutations that haven't been committed yet), we preserve\n * the existing version for Set, Patch, and Transform mutations. For Delete\n * mutations, we reset the version to 0.\n *\n * Here's the expected transition table.\n *\n * MUTATION APPLIED TO RESULTS IN\n *\n * SetMutation Document(v3) Document(v3)\n * SetMutation NoDocument(v3) Document(v0)\n * SetMutation null Document(v0)\n * PatchMutation Document(v3) Document(v3)\n * PatchMutation NoDocument(v3) NoDocument(v3)\n * PatchMutation null null\n * TransformMutation Document(v3) Document(v3)\n * TransformMutation NoDocument(v3) NoDocument(v3)\n * TransformMutation null null\n * DeleteMutation Document(v3) NoDocument(v0)\n * DeleteMutation NoDocument(v3) NoDocument(v0)\n * DeleteMutation null NoDocument(v0)\n *\n * For acknowledged mutations, we use the updateTime of the WriteResponse as\n * the resulting version for Set, Patch, and Transform mutations. As deletes\n * have no explicit update time, we use the commitTime of the WriteResponse for\n * Delete mutations.\n *\n * If a mutation is acknowledged by the backend but fails the precondition check\n * locally, we return an `UnknownDocument` and rely on Watch to send us the\n * updated version.\n *\n * Note that TransformMutations don't create Documents (in the case of being\n * applied to a NoDocument), even though they would on the backend. This is\n * because the client always combines the TransformMutation with a SetMutation\n * or PatchMutation and we only want to apply the transform if the prior\n * mutation resulted in a Document (always true for a SetMutation, but not\n * necessarily for a PatchMutation).\n *\n * ## Subclassing Notes\n *\n * Subclasses of Mutation need to implement applyToRemoteDocument() and\n * applyToLocalView() to implement the actual behavior of applying the mutation\n * to some source document.\n */\nexport abstract class Mutation {\n abstract readonly type: MutationType;\n abstract readonly key: DocumentKey;\n abstract readonly precondition: Precondition;\n\n /**\n * Applies this mutation to the given MaybeDocument or null for the purposes\n * of computing a new remote document. If the input document doesn't match the\n * expected state (e.g. it is null or outdated), an `UnknownDocument` can be\n * returned.\n *\n * @param maybeDoc The document to mutate. 
The input document can be null if\n * the client has no knowledge of the pre-mutation state of the document.\n * @param mutationResult The result of applying the mutation from the backend.\n * @return The mutated document. The returned document may be an\n * UnknownDocument if the mutation could not be applied to the locally\n * cached base document.\n */\n abstract applyToRemoteDocument(\n maybeDoc: MaybeDocument | null,\n mutationResult: MutationResult\n ): MaybeDocument;\n\n /**\n * Applies this mutation to the given MaybeDocument or null for the purposes\n * of computing the new local view of a document. Both the input and returned\n * documents can be null.\n *\n * @param maybeDoc The document to mutate. The input document can be null if\n * the client has no knowledge of the pre-mutation state of the document.\n * @param baseDoc The state of the document prior to this mutation batch. The\n * input document can be null if the client has no knowledge of the\n * pre-mutation state of the document.\n * @param localWriteTime A timestamp indicating the local write time of the\n * batch this mutation is a part of.\n * @return The mutated document. The returned document may be null, but only\n * if maybeDoc was null and the mutation would not create a new document.\n */\n abstract applyToLocalView(\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null,\n localWriteTime: Timestamp\n ): MaybeDocument | null;\n\n /**\n * If this mutation is not idempotent, returns the base value to persist with\n * this mutation. If a base value is returned, the mutation is always applied\n * to this base value, even if document has already been updated.\n *\n * The base value is a sparse object that consists of only the document\n * fields for which this mutation contains a non-idempotent transformation\n * (e.g. a numeric increment). The provided value guarantees consistent\n * behavior for non-idempotent transforms and allow us to return the same\n * latency-compensated value even if the backend has already applied the\n * mutation. The base value is null for idempotent mutations, as they can be\n * re-played even if the backend has already applied them.\n *\n * @return a base value to store along with the mutation, or null for\n * idempotent mutations.\n */\n abstract extractBaseValue(maybeDoc: MaybeDocument | null): ObjectValue | null;\n\n abstract isEqual(other: Mutation): boolean;\n\n protected verifyKeyMatches(maybeDoc: MaybeDocument | null): void {\n if (maybeDoc != null) {\n debugAssert(\n maybeDoc.key.isEqual(this.key),\n 'Can only apply a mutation to a document with the same key'\n );\n }\n }\n\n /**\n * Returns the version from the given document for use as the result of a\n * mutation. Mutations are defined to return the version of the base document\n * only if it is an existing document. 
Deleted and unknown documents have a\n * post-mutation version of SnapshotVersion.min().\n */\n protected static getPostMutationVersion(\n maybeDoc: MaybeDocument | null\n ): SnapshotVersion {\n if (maybeDoc instanceof Document) {\n return maybeDoc.version;\n } else {\n return SnapshotVersion.min();\n }\n }\n}\n\n/**\n * A mutation that creates or replaces the document at the given key with the\n * object value contents.\n */\nexport class SetMutation extends Mutation {\n constructor(\n readonly key: DocumentKey,\n readonly value: ObjectValue,\n readonly precondition: Precondition\n ) {\n super();\n }\n\n readonly type: MutationType = MutationType.Set;\n\n applyToRemoteDocument(\n maybeDoc: MaybeDocument | null,\n mutationResult: MutationResult\n ): MaybeDocument {\n this.verifyKeyMatches(maybeDoc);\n\n debugAssert(\n mutationResult.transformResults == null,\n 'Transform results received by SetMutation.'\n );\n\n // Unlike applyToLocalView, if we're applying a mutation to a remote\n // document the server has accepted the mutation so the precondition must\n // have held.\n\n const version = mutationResult.version;\n return new Document(this.key, version, this.value, {\n hasCommittedMutations: true\n });\n }\n\n applyToLocalView(\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null,\n localWriteTime: Timestamp\n ): MaybeDocument | null {\n this.verifyKeyMatches(maybeDoc);\n\n if (!this.precondition.isValidFor(maybeDoc)) {\n return maybeDoc;\n }\n\n const version = Mutation.getPostMutationVersion(maybeDoc);\n return new Document(this.key, version, this.value, {\n hasLocalMutations: true\n });\n }\n\n extractBaseValue(maybeDoc: MaybeDocument | null): null {\n return null;\n }\n\n isEqual(other: Mutation): boolean {\n return (\n other instanceof SetMutation &&\n this.key.isEqual(other.key) &&\n this.value.isEqual(other.value) &&\n this.precondition.isEqual(other.precondition)\n );\n }\n}\n\n/**\n * A mutation that modifies fields of the document at the given key with the\n * given values. The values are applied through a field mask:\n *\n * * When a field is in both the mask and the values, the corresponding field\n * is updated.\n * * When a field is in neither the mask nor the values, the corresponding\n * field is unmodified.\n * * When a field is in the mask but not in the values, the corresponding field\n * is deleted.\n * * When a field is not in the mask but is in the values, the values map is\n * ignored.\n */\nexport class PatchMutation extends Mutation {\n constructor(\n readonly key: DocumentKey,\n readonly data: ObjectValue,\n readonly fieldMask: FieldMask,\n readonly precondition: Precondition\n ) {\n super();\n }\n\n readonly type: MutationType = MutationType.Patch;\n\n applyToRemoteDocument(\n maybeDoc: MaybeDocument | null,\n mutationResult: MutationResult\n ): MaybeDocument {\n this.verifyKeyMatches(maybeDoc);\n\n debugAssert(\n mutationResult.transformResults == null,\n 'Transform results received by PatchMutation.'\n );\n\n if (!this.precondition.isValidFor(maybeDoc)) {\n // Since the mutation was not rejected, we know that the precondition\n // matched on the backend. 
We therefore must not have the expected version\n // of the document in our cache and return an UnknownDocument with the\n // known updateTime.\n return new UnknownDocument(this.key, mutationResult.version);\n }\n\n const newData = this.patchDocument(maybeDoc);\n return new Document(this.key, mutationResult.version, newData, {\n hasCommittedMutations: true\n });\n }\n\n applyToLocalView(\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null,\n localWriteTime: Timestamp\n ): MaybeDocument | null {\n this.verifyKeyMatches(maybeDoc);\n\n if (!this.precondition.isValidFor(maybeDoc)) {\n return maybeDoc;\n }\n\n const version = Mutation.getPostMutationVersion(maybeDoc);\n const newData = this.patchDocument(maybeDoc);\n return new Document(this.key, version, newData, {\n hasLocalMutations: true\n });\n }\n\n extractBaseValue(maybeDoc: MaybeDocument | null): null {\n return null;\n }\n\n isEqual(other: Mutation): boolean {\n return (\n other instanceof PatchMutation &&\n this.key.isEqual(other.key) &&\n this.fieldMask.isEqual(other.fieldMask) &&\n this.precondition.isEqual(other.precondition)\n );\n }\n\n /**\n * Patches the data of document if available or creates a new document. Note\n * that this does not check whether or not the precondition of this patch\n * holds.\n */\n private patchDocument(maybeDoc: MaybeDocument | null): ObjectValue {\n let data: ObjectValue;\n if (maybeDoc instanceof Document) {\n data = maybeDoc.data();\n } else {\n data = ObjectValue.empty();\n }\n return this.patchObject(data);\n }\n\n private patchObject(data: ObjectValue): ObjectValue {\n const builder = new ObjectValueBuilder(data);\n this.fieldMask.fields.forEach(fieldPath => {\n if (!fieldPath.isEmpty()) {\n const newValue = this.data.field(fieldPath);\n if (newValue !== null) {\n builder.set(fieldPath, newValue);\n } else {\n builder.delete(fieldPath);\n }\n }\n });\n return builder.build();\n }\n}\n\n/**\n * A mutation that modifies specific fields of the document with transform\n * operations. Currently the only supported transform is a server timestamp, but\n * IP Address, increment(n), etc. could be supported in the future.\n *\n * It is somewhat similar to a PatchMutation in that it patches specific fields\n * and has no effect when applied to a null or NoDocument (see comment on\n * Mutation for rationale).\n */\nexport class TransformMutation extends Mutation {\n readonly type: MutationType = MutationType.Transform;\n\n // NOTE: We set a precondition of exists: true as a safety-check, since we\n // always combine TransformMutations with a SetMutation or PatchMutation which\n // (if successful) should end up with an existing document.\n readonly precondition = Precondition.exists(true);\n\n constructor(\n readonly key: DocumentKey,\n readonly fieldTransforms: FieldTransform[]\n ) {\n super();\n }\n\n applyToRemoteDocument(\n maybeDoc: MaybeDocument | null,\n mutationResult: MutationResult\n ): MaybeDocument {\n this.verifyKeyMatches(maybeDoc);\n\n hardAssert(\n mutationResult.transformResults != null,\n 'Transform results missing for TransformMutation.'\n );\n\n if (!this.precondition.isValidFor(maybeDoc)) {\n // Since the mutation was not rejected, we know that the precondition\n // matched on the backend. 
We therefore must not have the expected version\n // of the document in our cache and return an UnknownDocument with the\n // known updateTime.\n return new UnknownDocument(this.key, mutationResult.version);\n }\n\n const doc = this.requireDocument(maybeDoc);\n const transformResults = this.serverTransformResults(\n maybeDoc,\n mutationResult.transformResults!\n );\n\n const version = mutationResult.version;\n const newData = this.transformObject(doc.data(), transformResults);\n return new Document(this.key, version, newData, {\n hasCommittedMutations: true\n });\n }\n\n applyToLocalView(\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null,\n localWriteTime: Timestamp\n ): MaybeDocument | null {\n this.verifyKeyMatches(maybeDoc);\n\n if (!this.precondition.isValidFor(maybeDoc)) {\n return maybeDoc;\n }\n\n const doc = this.requireDocument(maybeDoc);\n const transformResults = this.localTransformResults(\n localWriteTime,\n maybeDoc,\n baseDoc\n );\n const newData = this.transformObject(doc.data(), transformResults);\n return new Document(this.key, doc.version, newData, {\n hasLocalMutations: true\n });\n }\n\n extractBaseValue(maybeDoc: MaybeDocument | null): ObjectValue | null {\n let baseObject: ObjectValueBuilder | null = null;\n for (const fieldTransform of this.fieldTransforms) {\n const existingValue =\n maybeDoc instanceof Document\n ? maybeDoc.field(fieldTransform.field)\n : undefined;\n const coercedValue = fieldTransform.transform.computeBaseValue(\n existingValue || null\n );\n\n if (coercedValue != null) {\n if (baseObject == null) {\n baseObject = new ObjectValueBuilder().set(\n fieldTransform.field,\n coercedValue\n );\n } else {\n baseObject = baseObject.set(fieldTransform.field, coercedValue);\n }\n }\n }\n return baseObject ? baseObject.build() : null;\n }\n\n isEqual(other: Mutation): boolean {\n return (\n other instanceof TransformMutation &&\n this.key.isEqual(other.key) &&\n arrayEquals(this.fieldTransforms, other.fieldTransforms, (l, r) =>\n l.isEqual(r)\n ) &&\n this.precondition.isEqual(other.precondition)\n );\n }\n\n /**\n * Asserts that the given MaybeDocument is actually a Document and verifies\n * that it matches the key for this mutation. 
Since we only support\n * transformations with precondition exists this method is guaranteed to be\n * safe.\n */\n private requireDocument(maybeDoc: MaybeDocument | null): Document {\n debugAssert(\n maybeDoc instanceof Document,\n 'Unknown MaybeDocument type ' + maybeDoc\n );\n debugAssert(\n maybeDoc.key.isEqual(this.key),\n 'Can only transform a document with the same key'\n );\n return maybeDoc;\n }\n\n /**\n * Creates a list of \"transform results\" (a transform result is a field value\n * representing the result of applying a transform) for use after a\n * TransformMutation has been acknowledged by the server.\n *\n * @param baseDoc The document prior to applying this mutation batch.\n * @param serverTransformResults The transform results received by the server.\n * @return The transform results list.\n */\n private serverTransformResults(\n baseDoc: MaybeDocument | null,\n serverTransformResults: Array<api.Value | null>\n ): api.Value[] {\n const transformResults: api.Value[] = [];\n hardAssert(\n this.fieldTransforms.length === serverTransformResults.length,\n `server transform result count (${serverTransformResults.length}) ` +\n `should match field transform count (${this.fieldTransforms.length})`\n );\n\n for (let i = 0; i < serverTransformResults.length; i++) {\n const fieldTransform = this.fieldTransforms[i];\n const transform = fieldTransform.transform;\n let previousValue: api.Value | null = null;\n if (baseDoc instanceof Document) {\n previousValue = baseDoc.field(fieldTransform.field);\n }\n transformResults.push(\n transform.applyToRemoteDocument(\n previousValue,\n serverTransformResults[i]\n )\n );\n }\n return transformResults;\n }\n\n /**\n * Creates a list of \"transform results\" (a transform result is a field value\n * representing the result of applying a transform) for use when applying a\n * TransformMutation locally.\n *\n * @param localWriteTime The local time of the transform mutation (used to\n * generate ServerTimestampValues).\n * @param maybeDoc The current state of the document after applying all\n * previous mutations.\n * @param baseDoc The document prior to applying this mutation batch.\n * @return The transform results list.\n */\n private localTransformResults(\n localWriteTime: Timestamp,\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null\n ): api.Value[] {\n const transformResults: api.Value[] = [];\n for (const fieldTransform of this.fieldTransforms) {\n const transform = fieldTransform.transform;\n\n let previousValue: api.Value | null = null;\n if (maybeDoc instanceof Document) {\n previousValue = maybeDoc.field(fieldTransform.field);\n }\n\n if (previousValue === null && baseDoc instanceof Document) {\n // If the current document does not contain a value for the mutated\n // field, use the value that existed before applying this mutation\n // batch. 
This solves an edge case where a PatchMutation clears the\n // values in a nested map before the TransformMutation is applied.\n previousValue = baseDoc.field(fieldTransform.field);\n }\n\n transformResults.push(\n transform.applyToLocalView(previousValue, localWriteTime)\n );\n }\n return transformResults;\n }\n\n private transformObject(\n data: ObjectValue,\n transformResults: api.Value[]\n ): ObjectValue {\n debugAssert(\n transformResults.length === this.fieldTransforms.length,\n 'TransformResults length mismatch.'\n );\n\n const builder = new ObjectValueBuilder(data);\n for (let i = 0; i < this.fieldTransforms.length; i++) {\n const fieldTransform = this.fieldTransforms[i];\n const fieldPath = fieldTransform.field;\n builder.set(fieldPath, transformResults[i]);\n }\n return builder.build();\n }\n}\n\n/** A mutation that deletes the document at the given key. */\nexport class DeleteMutation extends Mutation {\n constructor(readonly key: DocumentKey, readonly precondition: Precondition) {\n super();\n }\n\n readonly type: MutationType = MutationType.Delete;\n\n applyToRemoteDocument(\n maybeDoc: MaybeDocument | null,\n mutationResult: MutationResult\n ): MaybeDocument {\n this.verifyKeyMatches(maybeDoc);\n\n debugAssert(\n mutationResult.transformResults == null,\n 'Transform results received by DeleteMutation.'\n );\n\n // Unlike applyToLocalView, if we're applying a mutation to a remote\n // document the server has accepted the mutation so the precondition must\n // have held.\n\n return new NoDocument(this.key, mutationResult.version, {\n hasCommittedMutations: true\n });\n }\n\n applyToLocalView(\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null,\n localWriteTime: Timestamp\n ): MaybeDocument | null {\n this.verifyKeyMatches(maybeDoc);\n\n if (!this.precondition.isValidFor(maybeDoc)) {\n return maybeDoc;\n }\n\n if (maybeDoc) {\n debugAssert(\n maybeDoc.key.isEqual(this.key),\n 'Can only apply mutation to document with same key'\n );\n }\n return new NoDocument(this.key, SnapshotVersion.min());\n }\n\n extractBaseValue(maybeDoc: MaybeDocument | null): null {\n return null;\n }\n\n isEqual(other: Mutation): boolean {\n return (\n other instanceof DeleteMutation &&\n this.key.isEqual(other.key) &&\n this.precondition.isEqual(other.precondition)\n );\n }\n}\n\n/**\n * A mutation that verifies the existence of the document at the given key with\n * the provided precondition.\n *\n * The `verify` operation is only used in Transactions, and this class serves\n * primarily to facilitate serialization into protos.\n */\nexport class VerifyMutation extends Mutation {\n constructor(readonly key: DocumentKey, readonly precondition: Precondition) {\n super();\n }\n\n readonly type: MutationType = MutationType.Verify;\n\n applyToRemoteDocument(\n maybeDoc: MaybeDocument | null,\n mutationResult: MutationResult\n ): MaybeDocument {\n fail('VerifyMutation should only be used in Transactions.');\n }\n\n applyToLocalView(\n maybeDoc: MaybeDocument | null,\n baseDoc: MaybeDocument | null,\n localWriteTime: Timestamp\n ): MaybeDocument | null {\n fail('VerifyMutation should only be used in Transactions.');\n }\n\n extractBaseValue(maybeDoc: MaybeDocument | null): null {\n fail('VerifyMutation should only be used in Transactions.');\n }\n\n isEqual(other: Mutation): boolean {\n return (\n other instanceof VerifyMutation &&\n this.key.isEqual(other.key) &&\n this.precondition.isEqual(other.precondition)\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed 
under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { debugAssert } from '../util/assert';\nimport { FieldMask } from './mutation';\nimport { FieldPath } from './path';\nimport { isServerTimestamp } from './server_timestamps';\nimport { valueEquals, isMapValue, typeOrder } from './values';\nimport { forEach } from '../util/obj';\n\nexport interface JsonObject<T> {\n [name: string]: T;\n}\n\nexport const enum TypeOrder {\n // This order is based on the backend's ordering, but modified to support\n // server timestamps.\n NullValue = 0,\n BooleanValue = 1,\n NumberValue = 2,\n TimestampValue = 3,\n ServerTimestampValue = 4,\n StringValue = 5,\n BlobValue = 6,\n RefValue = 7,\n GeoPointValue = 8,\n ArrayValue = 9,\n ObjectValue = 10\n}\n\n/**\n * An ObjectValue represents a MapValue in the Firestore Proto and offers the\n * ability to add and remove fields (via the ObjectValueBuilder).\n */\nexport class ObjectValue {\n constructor(public readonly proto: { mapValue: api.MapValue }) {\n debugAssert(\n !isServerTimestamp(proto),\n 'ServerTimestamps should be converted to ServerTimestampValue'\n );\n }\n\n static empty(): ObjectValue {\n return new ObjectValue({ mapValue: {} });\n }\n\n /**\n * Returns the value at the given path or null.\n *\n * @param path the path to search\n * @return The value at the path or if there it doesn't exist.\n */\n field(path: FieldPath): api.Value | null {\n if (path.isEmpty()) {\n return this.proto;\n } else {\n let value: api.Value = this.proto;\n for (let i = 0; i < path.length - 1; ++i) {\n if (!value.mapValue!.fields) {\n return null;\n }\n value = value.mapValue!.fields[path.get(i)];\n if (!isMapValue(value)) {\n return null;\n }\n }\n\n value = (value.mapValue!.fields || {})[path.lastSegment()];\n return value || null;\n }\n }\n\n isEqual(other: ObjectValue): boolean {\n return valueEquals(this.proto, other.proto);\n }\n}\n\n/**\n * An Overlay, which contains an update to apply. Can either be Value proto, a\n * map of Overlay values (to represent additional nesting at the given key) or\n * `null` (to represent field deletes).\n */\ntype Overlay = Map<string, Overlay> | api.Value | null;\n\n/**\n * An ObjectValueBuilder provides APIs to set and delete fields from an\n * ObjectValue.\n */\nexport class ObjectValueBuilder {\n /** A map that contains the accumulated changes in this builder. 
*/\n private overlayMap = new Map<string, Overlay>();\n\n /**\n * @param baseObject The object to mutate.\n */\n constructor(private readonly baseObject: ObjectValue = ObjectValue.empty()) {}\n\n /**\n * Sets the field to the provided value.\n *\n * @param path The field path to set.\n * @param value The value to set.\n * @return The current Builder instance.\n */\n set(path: FieldPath, value: api.Value): ObjectValueBuilder {\n debugAssert(\n !path.isEmpty(),\n 'Cannot set field for empty path on ObjectValue'\n );\n this.setOverlay(path, value);\n return this;\n }\n\n /**\n * Removes the field at the specified path. If there is no field at the\n * specified path, nothing is changed.\n *\n * @param path The field path to remove.\n * @return The current Builder instance.\n */\n delete(path: FieldPath): ObjectValueBuilder {\n debugAssert(\n !path.isEmpty(),\n 'Cannot delete field for empty path on ObjectValue'\n );\n this.setOverlay(path, null);\n return this;\n }\n\n /**\n * Adds `value` to the overlay map at `path`. Creates nested map entries if\n * needed.\n */\n private setOverlay(path: FieldPath, value: api.Value | null): void {\n let currentLevel = this.overlayMap;\n\n for (let i = 0; i < path.length - 1; ++i) {\n const currentSegment = path.get(i);\n let currentValue = currentLevel.get(currentSegment);\n\n if (currentValue instanceof Map) {\n // Re-use a previously created map\n currentLevel = currentValue;\n } else if (\n currentValue &&\n typeOrder(currentValue) === TypeOrder.ObjectValue\n ) {\n // Convert the existing Protobuf MapValue into a map\n currentValue = new Map<string, Overlay>(\n Object.entries(currentValue.mapValue!.fields || {})\n );\n currentLevel.set(currentSegment, currentValue);\n currentLevel = currentValue;\n } else {\n // Create an empty map to represent the current nesting level\n currentValue = new Map<string, Overlay>();\n currentLevel.set(currentSegment, currentValue);\n currentLevel = currentValue;\n }\n }\n\n currentLevel.set(path.lastSegment(), value);\n }\n\n /** Returns an ObjectValue with all mutations applied. */\n build(): ObjectValue {\n const mergedResult = this.applyOverlay(\n FieldPath.EMPTY_PATH,\n this.overlayMap\n );\n if (mergedResult != null) {\n return new ObjectValue(mergedResult);\n } else {\n return this.baseObject;\n }\n }\n\n /**\n * Applies any overlays from `currentOverlays` that exist at `currentPath`\n * and returns the merged data at `currentPath` (or null if there were no\n * changes).\n *\n * @param currentPath The path at the current nesting level. Can be set to\n * FieldValue.EMPTY_PATH to represent the root.\n * @param currentOverlays The overlays at the current nesting level in the\n * same format as `overlayMap`.\n * @return The merged data at `currentPath` or null if no modifications\n * were applied.\n */\n private applyOverlay(\n currentPath: FieldPath,\n currentOverlays: Map<string, Overlay>\n ): { mapValue: api.MapValue } | null {\n let modified = false;\n\n const existingValue = this.baseObject.field(currentPath);\n const resultAtPath = isMapValue(existingValue)\n ? 
// If there is already data at the current path, base our\n // modifications on top of the existing data.\n { ...existingValue.mapValue.fields }\n : {};\n\n currentOverlays.forEach((value, pathSegment) => {\n if (value instanceof Map) {\n const nested = this.applyOverlay(currentPath.child(pathSegment), value);\n if (nested != null) {\n resultAtPath[pathSegment] = nested;\n modified = true;\n }\n } else if (value !== null) {\n resultAtPath[pathSegment] = value;\n modified = true;\n } else if (resultAtPath.hasOwnProperty(pathSegment)) {\n delete resultAtPath[pathSegment];\n modified = true;\n }\n });\n\n return modified ? { mapValue: { fields: resultAtPath } } : null;\n }\n}\n\n/**\n * Returns a FieldMask built from all fields in a MapValue.\n */\nexport function extractFieldMask(value: api.MapValue): FieldMask {\n const fields: FieldPath[] = [];\n forEach(value!.fields || {}, (key, value) => {\n const currentPath = new FieldPath([key]);\n if (isMapValue(value)) {\n const nestedMask = extractFieldMask(value.mapValue!);\n const nestedFields = nestedMask.fields;\n if (nestedFields.length === 0) {\n // Preserve the empty map by adding it to the FieldMask.\n fields.push(currentPath);\n } else {\n // For nested and non-empty ObjectValues, add the FieldPath of the\n // leaf nodes.\n for (const nestedPath of nestedFields) {\n fields.push(currentPath.child(nestedPath));\n }\n }\n } else {\n // For nested and non-empty ObjectValues, add the FieldPath of the leaf\n // nodes.\n fields.push(currentPath);\n }\n });\n return new FieldMask(fields);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { fail } from '../util/assert';\nimport { Code } from '../util/error';\nimport { logError } from '../util/log';\n\n/**\n * Error Codes describing the different ways GRPC can fail. These are copied\n * directly from GRPC's sources here:\n *\n * https://github.com/grpc/grpc/blob/bceec94ea4fc5f0085d81235d8e1c06798dc341a/include/grpc%2B%2B/impl/codegen/status_code_enum.h\n *\n * Important! The names of these identifiers matter because the string forms\n * are used for reverse lookups from the webchannel stream. 
Do NOT change the\n * names of these identifiers or change this into a const enum.\n */\nenum RpcCode {\n OK = 0,\n CANCELLED = 1,\n UNKNOWN = 2,\n INVALID_ARGUMENT = 3,\n DEADLINE_EXCEEDED = 4,\n NOT_FOUND = 5,\n ALREADY_EXISTS = 6,\n PERMISSION_DENIED = 7,\n UNAUTHENTICATED = 16,\n RESOURCE_EXHAUSTED = 8,\n FAILED_PRECONDITION = 9,\n ABORTED = 10,\n OUT_OF_RANGE = 11,\n UNIMPLEMENTED = 12,\n INTERNAL = 13,\n UNAVAILABLE = 14,\n DATA_LOSS = 15\n}\n\n/**\n * Determines whether an error code represents a permanent error when received\n * in response to a non-write operation.\n *\n * See isPermanentWriteError for classifying write errors.\n */\nexport function isPermanentError(code: Code): boolean {\n switch (code) {\n case Code.OK:\n return fail('Treated status OK as error');\n case Code.CANCELLED:\n case Code.UNKNOWN:\n case Code.DEADLINE_EXCEEDED:\n case Code.RESOURCE_EXHAUSTED:\n case Code.INTERNAL:\n case Code.UNAVAILABLE:\n // Unauthenticated means something went wrong with our token and we need\n // to retry with new credentials which will happen automatically.\n case Code.UNAUTHENTICATED:\n return false;\n case Code.INVALID_ARGUMENT:\n case Code.NOT_FOUND:\n case Code.ALREADY_EXISTS:\n case Code.PERMISSION_DENIED:\n case Code.FAILED_PRECONDITION:\n // Aborted might be retried in some scenarios, but that is dependant on\n // the context and should handled individually by the calling code.\n // See https://cloud.google.com/apis/design/errors.\n case Code.ABORTED:\n case Code.OUT_OF_RANGE:\n case Code.UNIMPLEMENTED:\n case Code.DATA_LOSS:\n return true;\n default:\n return fail('Unknown status code: ' + code);\n }\n}\n\n/**\n * Determines whether an error code represents a permanent error when received\n * in response to a write operation.\n *\n * Write operations must be handled specially because as of b/119437764, ABORTED\n * errors on the write stream should be retried too (even though ABORTED errors\n * are not generally retryable).\n *\n * Note that during the initial handshake on the write stream an ABORTED error\n * signals that we should discard our stream token (i.e. it is permanent). This\n * means a handshake error should be classified with isPermanentError, above.\n */\nexport function isPermanentWriteError(code: Code): boolean {\n return isPermanentError(code) && code !== Code.ABORTED;\n}\n\n/**\n * Maps an error Code from a GRPC status identifier like 'NOT_FOUND'.\n *\n * @returns The Code equivalent to the given status string or undefined if\n * there is no match.\n */\nexport function mapCodeFromRpcStatus(status: string): Code | undefined {\n // lookup by string\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const code: RpcCode = RpcCode[status as any] as any;\n if (code === undefined) {\n return undefined;\n }\n\n return mapCodeFromRpcCode(code);\n}\n\n/**\n * Maps an error Code from GRPC status code number, like 0, 1, or 14. These\n * are not the same as HTTP status codes.\n *\n * @returns The Code equivalent to the given GRPC status code. 
Fails if there\n * is no match.\n */\nexport function mapCodeFromRpcCode(code: number | undefined): Code {\n if (code === undefined) {\n // This shouldn't normally happen, but in certain error cases (like trying\n // to send invalid proto messages) we may get an error with no GRPC code.\n logError('GRPC error has no .code');\n return Code.UNKNOWN;\n }\n\n switch (code) {\n case RpcCode.OK:\n return Code.OK;\n case RpcCode.CANCELLED:\n return Code.CANCELLED;\n case RpcCode.UNKNOWN:\n return Code.UNKNOWN;\n case RpcCode.DEADLINE_EXCEEDED:\n return Code.DEADLINE_EXCEEDED;\n case RpcCode.RESOURCE_EXHAUSTED:\n return Code.RESOURCE_EXHAUSTED;\n case RpcCode.INTERNAL:\n return Code.INTERNAL;\n case RpcCode.UNAVAILABLE:\n return Code.UNAVAILABLE;\n case RpcCode.UNAUTHENTICATED:\n return Code.UNAUTHENTICATED;\n case RpcCode.INVALID_ARGUMENT:\n return Code.INVALID_ARGUMENT;\n case RpcCode.NOT_FOUND:\n return Code.NOT_FOUND;\n case RpcCode.ALREADY_EXISTS:\n return Code.ALREADY_EXISTS;\n case RpcCode.PERMISSION_DENIED:\n return Code.PERMISSION_DENIED;\n case RpcCode.FAILED_PRECONDITION:\n return Code.FAILED_PRECONDITION;\n case RpcCode.ABORTED:\n return Code.ABORTED;\n case RpcCode.OUT_OF_RANGE:\n return Code.OUT_OF_RANGE;\n case RpcCode.UNIMPLEMENTED:\n return Code.UNIMPLEMENTED;\n case RpcCode.DATA_LOSS:\n return Code.DATA_LOSS;\n default:\n return fail('Unknown status code: ' + code);\n }\n}\n\n/**\n * Maps an RPC code from a Code. This is the reverse operation from\n * mapCodeFromRpcCode and should really only be used in tests.\n */\nexport function mapRpcCodeFromCode(code: Code | undefined): number {\n if (code === undefined) {\n return RpcCode.OK;\n }\n\n switch (code) {\n case Code.OK:\n return RpcCode.OK;\n case Code.CANCELLED:\n return RpcCode.CANCELLED;\n case Code.UNKNOWN:\n return RpcCode.UNKNOWN;\n case Code.DEADLINE_EXCEEDED:\n return RpcCode.DEADLINE_EXCEEDED;\n case Code.RESOURCE_EXHAUSTED:\n return RpcCode.RESOURCE_EXHAUSTED;\n case Code.INTERNAL:\n return RpcCode.INTERNAL;\n case Code.UNAVAILABLE:\n return RpcCode.UNAVAILABLE;\n case Code.UNAUTHENTICATED:\n return RpcCode.UNAUTHENTICATED;\n case Code.INVALID_ARGUMENT:\n return RpcCode.INVALID_ARGUMENT;\n case Code.NOT_FOUND:\n return RpcCode.NOT_FOUND;\n case Code.ALREADY_EXISTS:\n return RpcCode.ALREADY_EXISTS;\n case Code.PERMISSION_DENIED:\n return RpcCode.PERMISSION_DENIED;\n case Code.FAILED_PRECONDITION:\n return RpcCode.FAILED_PRECONDITION;\n case Code.ABORTED:\n return RpcCode.ABORTED;\n case Code.OUT_OF_RANGE:\n return RpcCode.OUT_OF_RANGE;\n case Code.UNIMPLEMENTED:\n return RpcCode.UNIMPLEMENTED;\n case Code.DATA_LOSS:\n return RpcCode.DATA_LOSS;\n default:\n return fail('Unknown status code: ' + code);\n }\n}\n\n/**\n * Converts an HTTP Status Code to the equivalent error code.\n *\n * @param status An HTTP Status Code, like 200, 404, 503, etc.\n * @returns The equivalent Code. Unknown status codes are mapped to\n * Code.UNKNOWN.\n */\nexport function mapCodeFromHttpStatus(status: number): Code {\n // The canonical error codes for Google APIs [1] specify mapping onto HTTP\n // status codes but the mapping is not bijective. 
In each case of ambiguity\n // this function chooses a primary error.\n //\n // [1]\n // https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto\n switch (status) {\n case 200: // OK\n return Code.OK;\n\n case 400: // Bad Request\n return Code.INVALID_ARGUMENT;\n // Other possibilities based on the forward mapping\n // return Code.FAILED_PRECONDITION;\n // return Code.OUT_OF_RANGE;\n\n case 401: // Unauthorized\n return Code.UNAUTHENTICATED;\n\n case 403: // Forbidden\n return Code.PERMISSION_DENIED;\n\n case 404: // Not Found\n return Code.NOT_FOUND;\n\n case 409: // Conflict\n return Code.ABORTED;\n // Other possibilities:\n // return Code.ALREADY_EXISTS;\n\n case 416: // Range Not Satisfiable\n return Code.OUT_OF_RANGE;\n\n case 429: // Too Many Requests\n return Code.RESOURCE_EXHAUSTED;\n\n case 499: // Client Closed Request\n return Code.CANCELLED;\n\n case 500: // Internal Server Error\n return Code.UNKNOWN;\n // Other possibilities:\n // return Code.INTERNAL;\n // return Code.DATA_LOSS;\n\n case 501: // Unimplemented\n return Code.UNIMPLEMENTED;\n\n case 503: // Service Unavailable\n return Code.UNAVAILABLE;\n\n case 504: // Gateway Timeout\n return Code.DEADLINE_EXCEEDED;\n\n default:\n if (status >= 200 && status < 300) {\n return Code.OK;\n }\n if (status >= 400 && status < 500) {\n return Code.FAILED_PRECONDITION;\n }\n if (status >= 500 && status < 600) {\n return Code.INTERNAL;\n }\n return Code.UNKNOWN;\n }\n}\n\n/**\n * Converts an HTTP response's error status to the equivalent error code.\n *\n * @param status An HTTP error response status (\"FAILED_PRECONDITION\",\n * \"UNKNOWN\", etc.)\n * @returns The equivalent Code. Non-matching responses are mapped to\n * Code.UNKNOWN.\n */\nexport function mapCodeFromHttpResponseErrorStatus(status: string): Code {\n const serverError = status.toLowerCase().replace('_', '-');\n return Object.values(Code).indexOf(serverError as Code) >= 0\n ? 
(serverError as Code)\n : Code.UNKNOWN;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { fail } from '../util/assert';\n\nimport { DocumentKey } from './document_key';\nimport { ObjectValue } from './object_value';\nimport { FieldPath } from './path';\nimport { valueCompare } from './values';\n\nexport interface DocumentOptions {\n hasLocalMutations?: boolean;\n hasCommittedMutations?: boolean;\n}\n\n/**\n * The result of a lookup for a given path may be an existing document or a\n * marker that this document does not exist at a given version.\n */\nexport abstract class MaybeDocument {\n constructor(readonly key: DocumentKey, readonly version: SnapshotVersion) {}\n\n /**\n * Whether this document had a local mutation applied that has not yet been\n * acknowledged by Watch.\n */\n abstract get hasPendingWrites(): boolean;\n\n abstract isEqual(other: MaybeDocument | null | undefined): boolean;\n\n abstract toString(): string;\n}\n\n/**\n * Represents a document in Firestore with a key, version, data and whether the\n * data has local mutations applied to it.\n */\nexport class Document extends MaybeDocument {\n readonly hasLocalMutations: boolean;\n readonly hasCommittedMutations: boolean;\n\n constructor(\n key: DocumentKey,\n version: SnapshotVersion,\n private readonly objectValue: ObjectValue,\n options: DocumentOptions\n ) {\n super(key, version);\n this.hasLocalMutations = !!options.hasLocalMutations;\n this.hasCommittedMutations = !!options.hasCommittedMutations;\n }\n\n field(path: FieldPath): api.Value | null {\n return this.objectValue.field(path);\n }\n\n data(): ObjectValue {\n return this.objectValue;\n }\n\n toProto(): { mapValue: api.MapValue } {\n return this.objectValue.proto;\n }\n\n isEqual(other: MaybeDocument | null | undefined): boolean {\n return (\n other instanceof Document &&\n this.key.isEqual(other.key) &&\n this.version.isEqual(other.version) &&\n this.hasLocalMutations === other.hasLocalMutations &&\n this.hasCommittedMutations === other.hasCommittedMutations &&\n this.objectValue.isEqual(other.objectValue)\n );\n }\n\n toString(): string {\n return (\n `Document(${this.key}, ${\n this.version\n }, ${this.objectValue.toString()}, ` +\n `{hasLocalMutations: ${this.hasLocalMutations}}), ` +\n `{hasCommittedMutations: ${this.hasCommittedMutations}})`\n );\n }\n\n get hasPendingWrites(): boolean {\n return this.hasLocalMutations || this.hasCommittedMutations;\n }\n}\n\n/**\n * Compares the value for field `field` in the provided documents. 
Throws if\n * the field does not exist in both documents.\n */\nexport function compareDocumentsByField(\n field: FieldPath,\n d1: Document,\n d2: Document\n): number {\n const v1 = d1.field(field);\n const v2 = d2.field(field);\n if (v1 !== null && v2 !== null) {\n return valueCompare(v1, v2);\n } else {\n return fail(\"Trying to compare documents on fields that don't exist\");\n }\n}\n\n/**\n * A class representing a deleted document.\n * Version is set to 0 if we don't point to any specific time, otherwise it\n * denotes time we know it didn't exist at.\n */\nexport class NoDocument extends MaybeDocument {\n readonly hasCommittedMutations: boolean;\n\n constructor(\n key: DocumentKey,\n version: SnapshotVersion,\n options?: DocumentOptions\n ) {\n super(key, version);\n this.hasCommittedMutations = !!(options && options.hasCommittedMutations);\n }\n\n toString(): string {\n return `NoDocument(${this.key}, ${this.version})`;\n }\n\n get hasPendingWrites(): boolean {\n return this.hasCommittedMutations;\n }\n\n isEqual(other: MaybeDocument | null | undefined): boolean {\n return (\n other instanceof NoDocument &&\n other.hasCommittedMutations === this.hasCommittedMutations &&\n other.version.isEqual(this.version) &&\n other.key.isEqual(this.key)\n );\n }\n}\n\n/**\n * A class representing an existing document whose data is unknown (e.g. a\n * document that was updated without a known base document).\n */\nexport class UnknownDocument extends MaybeDocument {\n toString(): string {\n return `UnknownDocument(${this.key}, ${this.version})`;\n }\n\n get hasPendingWrites(): boolean {\n return true;\n }\n\n isEqual(other: MaybeDocument | null | undefined): boolean {\n return (\n other instanceof UnknownDocument &&\n other.version.isEqual(this.version) &&\n other.key.isEqual(this.key)\n );\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DocumentKey } from '../model/document_key';\nimport { ResourcePath } from '../model/path';\nimport { isNullOrUndefined } from '../util/types';\nimport { Bound, Filter, OrderBy } from './query';\n\n/**\n * A Target represents the WatchTarget representation of a Query, which is used\n * by the LocalStore and the RemoteStore to keep track of and to execute\n * backend queries. 
While a Query can represent multiple Targets, each Targets\n * maps to a single WatchTarget in RemoteStore and a single TargetData entry\n * in persistence.\n */\nexport class Target {\n private memoizedCanonicalId: string | null = null;\n\n /**\n * Initializes a Target with a path and optional additional query constraints.\n * Path must currently be empty if this is a collection group query.\n *\n * NOTE: you should always construct `Target` from `Query.toTarget` instead of\n * using this constructor, because `Query` provides an implicit `orderBy`\n * property.\n */\n constructor(\n readonly path: ResourcePath,\n readonly collectionGroup: string | null = null,\n readonly orderBy: OrderBy[] = [],\n readonly filters: Filter[] = [],\n readonly limit: number | null = null,\n readonly startAt: Bound | null = null,\n readonly endAt: Bound | null = null\n ) {}\n\n canonicalId(): string {\n if (this.memoizedCanonicalId === null) {\n let canonicalId = this.path.canonicalString();\n if (this.collectionGroup !== null) {\n canonicalId += '|cg:' + this.collectionGroup;\n }\n canonicalId += '|f:';\n canonicalId += this.filters.map(f => f.canonicalId()).join(',');\n canonicalId += '|ob:';\n canonicalId += this.orderBy.map(o => o.canonicalId()).join(',');\n\n if (!isNullOrUndefined(this.limit)) {\n canonicalId += '|l:';\n canonicalId += this.limit!;\n }\n if (this.startAt) {\n canonicalId += '|lb:';\n canonicalId += this.startAt.canonicalId();\n }\n if (this.endAt) {\n canonicalId += '|ub:';\n canonicalId += this.endAt.canonicalId();\n }\n this.memoizedCanonicalId = canonicalId;\n }\n return this.memoizedCanonicalId;\n }\n\n toString(): string {\n let str = this.path.canonicalString();\n if (this.collectionGroup !== null) {\n str += ' collectionGroup=' + this.collectionGroup;\n }\n if (this.filters.length > 0) {\n str += `, filters: [${this.filters.join(', ')}]`;\n }\n if (!isNullOrUndefined(this.limit)) {\n str += ', limit: ' + this.limit;\n }\n if (this.orderBy.length > 0) {\n str += `, orderBy: [${this.orderBy.join(', ')}]`;\n }\n if (this.startAt) {\n str += ', startAt: ' + this.startAt.canonicalId();\n }\n if (this.endAt) {\n str += ', endAt: ' + this.endAt.canonicalId();\n }\n return `Target(${str})`;\n }\n\n isEqual(other: Target): boolean {\n if (this.limit !== other.limit) {\n return false;\n }\n\n if (this.orderBy.length !== other.orderBy.length) {\n return false;\n }\n\n for (let i = 0; i < this.orderBy.length; i++) {\n if (!this.orderBy[i].isEqual(other.orderBy[i])) {\n return false;\n }\n }\n\n if (this.filters.length !== other.filters.length) {\n return false;\n }\n\n for (let i = 0; i < this.filters.length; i++) {\n if (!this.filters[i].isEqual(other.filters[i])) {\n return false;\n }\n }\n\n if (this.collectionGroup !== other.collectionGroup) {\n return false;\n }\n\n if (!this.path.isEqual(other.path)) {\n return false;\n }\n\n if (\n this.startAt !== null\n ? !this.startAt.isEqual(other.startAt)\n : other.startAt !== null\n ) {\n return false;\n }\n\n return this.endAt !== null\n ? 
this.endAt.isEqual(other.endAt)\n : other.endAt === null;\n }\n\n isDocumentQuery(): boolean {\n return (\n DocumentKey.isDocumentKey(this.path) &&\n this.collectionGroup === null &&\n this.filters.length === 0\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { compareDocumentsByField, Document } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport {\n canonicalId,\n valueCompare,\n arrayValueContains,\n valueEquals,\n isArray,\n isNanValue,\n isNullValue,\n isReferenceValue,\n typeOrder\n} from '../model/values';\nimport { FieldPath, ResourcePath } from '../model/path';\nimport { debugAssert, fail } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { isNullOrUndefined } from '../util/types';\nimport { Target } from './target';\n\nexport const enum LimitType {\n First = 'F',\n Last = 'L'\n}\n\n/**\n * Query encapsulates all the query attributes we support in the SDK. It can\n * be run against the LocalStore, as well as be converted to a `Target` to\n * query the RemoteStore results.\n */\nexport class Query {\n static atPath(path: ResourcePath): Query {\n return new Query(path);\n }\n\n private memoizedOrderBy: OrderBy[] | null = null;\n\n // The corresponding `Target` of this `Query` instance.\n private memoizedTarget: Target | null = null;\n\n /**\n * Initializes a Query with a path and optional additional query constraints.\n * Path must currently be empty if this is a collection group query.\n */\n constructor(\n readonly path: ResourcePath,\n readonly collectionGroup: string | null = null,\n readonly explicitOrderBy: OrderBy[] = [],\n readonly filters: Filter[] = [],\n readonly limit: number | null = null,\n readonly limitType: LimitType = LimitType.First,\n readonly startAt: Bound | null = null,\n readonly endAt: Bound | null = null\n ) {\n if (this.startAt) {\n this.assertValidBound(this.startAt);\n }\n if (this.endAt) {\n this.assertValidBound(this.endAt);\n }\n }\n\n get orderBy(): OrderBy[] {\n if (this.memoizedOrderBy === null) {\n this.memoizedOrderBy = [];\n\n const inequalityField = this.getInequalityFilterField();\n const firstOrderByField = this.getFirstOrderByField();\n if (inequalityField !== null && firstOrderByField === null) {\n // In order to implicitly add key ordering, we must also add the\n // inequality filter field for it to be a valid query.\n // Note that the default inequality field and key ordering is ascending.\n if (!inequalityField.isKeyField()) {\n this.memoizedOrderBy.push(new OrderBy(inequalityField));\n }\n this.memoizedOrderBy.push(\n new OrderBy(FieldPath.keyField(), Direction.ASCENDING)\n );\n } else {\n debugAssert(\n inequalityField === null ||\n (firstOrderByField !== null &&\n inequalityField.isEqual(firstOrderByField)),\n 'First orderBy should match inequality field.'\n );\n let foundKeyOrdering = false;\n for (const orderBy of 
this.explicitOrderBy) {\n this.memoizedOrderBy.push(orderBy);\n if (orderBy.field.isKeyField()) {\n foundKeyOrdering = true;\n }\n }\n if (!foundKeyOrdering) {\n // The order of the implicit key ordering always matches the last\n // explicit order by\n const lastDirection =\n this.explicitOrderBy.length > 0\n ? this.explicitOrderBy[this.explicitOrderBy.length - 1].dir\n : Direction.ASCENDING;\n this.memoizedOrderBy.push(\n new OrderBy(FieldPath.keyField(), lastDirection)\n );\n }\n }\n }\n return this.memoizedOrderBy;\n }\n\n addFilter(filter: Filter): Query {\n debugAssert(\n this.getInequalityFilterField() == null ||\n !(filter instanceof FieldFilter) ||\n !filter.isInequality() ||\n filter.field.isEqual(this.getInequalityFilterField()!),\n 'Query must only have one inequality field.'\n );\n\n debugAssert(\n !this.isDocumentQuery(),\n 'No filtering allowed for document query'\n );\n\n const newFilters = this.filters.concat([filter]);\n return new Query(\n this.path,\n this.collectionGroup,\n this.explicitOrderBy.slice(),\n newFilters,\n this.limit,\n this.limitType,\n this.startAt,\n this.endAt\n );\n }\n\n addOrderBy(orderBy: OrderBy): Query {\n debugAssert(\n !this.startAt && !this.endAt,\n 'Bounds must be set after orderBy'\n );\n // TODO(dimond): validate that orderBy does not list the same key twice.\n const newOrderBy = this.explicitOrderBy.concat([orderBy]);\n return new Query(\n this.path,\n this.collectionGroup,\n newOrderBy,\n this.filters.slice(),\n this.limit,\n this.limitType,\n this.startAt,\n this.endAt\n );\n }\n\n withLimitToFirst(limit: number | null): Query {\n return new Query(\n this.path,\n this.collectionGroup,\n this.explicitOrderBy.slice(),\n this.filters.slice(),\n limit,\n LimitType.First,\n this.startAt,\n this.endAt\n );\n }\n\n withLimitToLast(limit: number | null): Query {\n return new Query(\n this.path,\n this.collectionGroup,\n this.explicitOrderBy.slice(),\n this.filters.slice(),\n limit,\n LimitType.Last,\n this.startAt,\n this.endAt\n );\n }\n\n withStartAt(bound: Bound): Query {\n return new Query(\n this.path,\n this.collectionGroup,\n this.explicitOrderBy.slice(),\n this.filters.slice(),\n this.limit,\n this.limitType,\n bound,\n this.endAt\n );\n }\n\n withEndAt(bound: Bound): Query {\n return new Query(\n this.path,\n this.collectionGroup,\n this.explicitOrderBy.slice(),\n this.filters.slice(),\n this.limit,\n this.limitType,\n this.startAt,\n bound\n );\n }\n\n /**\n * Helper to convert a collection group query into a collection query at a\n * specific path. This is used when executing collection group queries, since\n * we have to split the query into a set of collection queries at multiple\n * paths.\n */\n asCollectionQueryAtPath(path: ResourcePath): Query {\n return new Query(\n path,\n /*collectionGroup=*/ null,\n this.explicitOrderBy.slice(),\n this.filters.slice(),\n this.limit,\n this.limitType,\n this.startAt,\n this.endAt\n );\n }\n\n /**\n * Returns true if this query does not specify any query constraints that\n * could remove results.\n */\n matchesAllDocuments(): boolean {\n return (\n this.filters.length === 0 &&\n this.limit === null &&\n this.startAt == null &&\n this.endAt == null &&\n (this.explicitOrderBy.length === 0 ||\n (this.explicitOrderBy.length === 1 &&\n this.explicitOrderBy[0].field.isKeyField()))\n );\n }\n\n // TODO(b/29183165): This is used to get a unique string from a query to, for\n // example, use as a dictionary key, but the implementation is subject to\n // collisions. 
Make it collision-free.\n canonicalId(): string {\n return `${this.toTarget().canonicalId()}|lt:${this.limitType}`;\n }\n\n toString(): string {\n return `Query(target=${this.toTarget().toString()}; limitType=${\n this.limitType\n })`;\n }\n\n isEqual(other: Query): boolean {\n return (\n this.toTarget().isEqual(other.toTarget()) &&\n this.limitType === other.limitType\n );\n }\n\n docComparator(d1: Document, d2: Document): number {\n let comparedOnKeyField = false;\n for (const orderBy of this.orderBy) {\n const comp = orderBy.compare(d1, d2);\n if (comp !== 0) {\n return comp;\n }\n comparedOnKeyField = comparedOnKeyField || orderBy.field.isKeyField();\n }\n // Assert that we actually compared by key\n debugAssert(\n comparedOnKeyField,\n \"orderBy used that doesn't compare on key field\"\n );\n return 0;\n }\n\n matches(doc: Document): boolean {\n return (\n this.matchesPathAndCollectionGroup(doc) &&\n this.matchesOrderBy(doc) &&\n this.matchesFilters(doc) &&\n this.matchesBounds(doc)\n );\n }\n\n hasLimitToFirst(): boolean {\n return !isNullOrUndefined(this.limit) && this.limitType === LimitType.First;\n }\n\n hasLimitToLast(): boolean {\n return !isNullOrUndefined(this.limit) && this.limitType === LimitType.Last;\n }\n\n getFirstOrderByField(): FieldPath | null {\n return this.explicitOrderBy.length > 0\n ? this.explicitOrderBy[0].field\n : null;\n }\n\n getInequalityFilterField(): FieldPath | null {\n for (const filter of this.filters) {\n if (filter instanceof FieldFilter && filter.isInequality()) {\n return filter.field;\n }\n }\n return null;\n }\n\n // Checks if any of the provided Operators are included in the query and\n // returns the first one that is, or null if none are.\n findFilterOperator(operators: Operator[]): Operator | null {\n for (const filter of this.filters) {\n if (filter instanceof FieldFilter) {\n if (operators.indexOf(filter.op) >= 0) {\n return filter.op;\n }\n }\n }\n return null;\n }\n\n isDocumentQuery(): boolean {\n return this.toTarget().isDocumentQuery();\n }\n\n isCollectionGroupQuery(): boolean {\n return this.collectionGroup !== null;\n }\n\n /**\n * Converts this `Query` instance to it's corresponding `Target`\n * representation.\n */\n toTarget(): Target {\n if (!this.memoizedTarget) {\n if (this.limitType === LimitType.First) {\n this.memoizedTarget = new Target(\n this.path,\n this.collectionGroup,\n this.orderBy,\n this.filters,\n this.limit,\n this.startAt,\n this.endAt\n );\n } else {\n // Flip the orderBy directions since we want the last results\n const orderBys = [] as OrderBy[];\n for (const orderBy of this.orderBy) {\n const dir =\n orderBy.dir === Direction.DESCENDING\n ? Direction.ASCENDING\n : Direction.DESCENDING;\n orderBys.push(new OrderBy(orderBy.field, dir));\n }\n\n // We need to swap the cursors to match the now-flipped query ordering.\n const startAt = this.endAt\n ? new Bound(this.endAt.position, !this.endAt.before)\n : null;\n const endAt = this.startAt\n ? 
new Bound(this.startAt.position, !this.startAt.before)\n : null;\n\n // Now return as a LimitType.First query.\n this.memoizedTarget = new Target(\n this.path,\n this.collectionGroup,\n orderBys,\n this.filters,\n this.limit,\n startAt,\n endAt\n );\n }\n }\n return this.memoizedTarget!;\n }\n\n private matchesPathAndCollectionGroup(doc: Document): boolean {\n const docPath = doc.key.path;\n if (this.collectionGroup !== null) {\n // NOTE: this.path is currently always empty since we don't expose Collection\n // Group queries rooted at a document path yet.\n return (\n doc.key.hasCollectionId(this.collectionGroup) &&\n this.path.isPrefixOf(docPath)\n );\n } else if (DocumentKey.isDocumentKey(this.path)) {\n // exact match for document queries\n return this.path.isEqual(docPath);\n } else {\n // shallow ancestor queries by default\n return this.path.isImmediateParentOf(docPath);\n }\n }\n\n /**\n * A document must have a value for every ordering clause in order to show up\n * in the results.\n */\n private matchesOrderBy(doc: Document): boolean {\n for (const orderBy of this.explicitOrderBy) {\n // order by key always matches\n if (!orderBy.field.isKeyField() && doc.field(orderBy.field) === null) {\n return false;\n }\n }\n return true;\n }\n\n private matchesFilters(doc: Document): boolean {\n for (const filter of this.filters) {\n if (!filter.matches(doc)) {\n return false;\n }\n }\n return true;\n }\n\n /**\n * Makes sure a document is within the bounds, if provided.\n */\n private matchesBounds(doc: Document): boolean {\n if (this.startAt && !this.startAt.sortsBeforeDocument(this.orderBy, doc)) {\n return false;\n }\n if (this.endAt && this.endAt.sortsBeforeDocument(this.orderBy, doc)) {\n return false;\n }\n return true;\n }\n\n private assertValidBound(bound: Bound): void {\n debugAssert(\n bound.position.length <= this.orderBy.length,\n 'Bound is longer than orderBy'\n );\n }\n}\n\nexport abstract class Filter {\n abstract matches(doc: Document): boolean;\n abstract canonicalId(): string;\n abstract isEqual(filter: Filter): boolean;\n}\n\nexport const enum Operator {\n LESS_THAN = '<',\n LESS_THAN_OR_EQUAL = '<=',\n EQUAL = '==',\n GREATER_THAN = '>',\n GREATER_THAN_OR_EQUAL = '>=',\n ARRAY_CONTAINS = 'array-contains',\n IN = 'in',\n ARRAY_CONTAINS_ANY = 'array-contains-any'\n}\n\nexport class FieldFilter extends Filter {\n protected constructor(\n public field: FieldPath,\n public op: Operator,\n public value: api.Value\n ) {\n super();\n }\n\n /**\n * Creates a filter based on the provided arguments.\n */\n static create(field: FieldPath, op: Operator, value: api.Value): FieldFilter {\n if (field.isKeyField()) {\n if (op === Operator.IN) {\n debugAssert(\n isArray(value),\n 'Comparing on key with IN, but filter value not an ArrayValue'\n );\n debugAssert(\n (value.arrayValue.values || []).every(elem => isReferenceValue(elem)),\n 'Comparing on key with IN, but an array value was not a RefValue'\n );\n return new KeyFieldInFilter(field, value);\n } else {\n debugAssert(\n isReferenceValue(value),\n 'Comparing on key, but filter value not a RefValue'\n );\n debugAssert(\n op !== Operator.ARRAY_CONTAINS && op !== Operator.ARRAY_CONTAINS_ANY,\n `'${op.toString()}' queries don't make sense on document keys.`\n );\n return new KeyFieldFilter(field, op, value);\n }\n } else if (isNullValue(value)) {\n if (op !== Operator.EQUAL) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. 
Null supports only equality comparisons.'\n );\n }\n return new FieldFilter(field, op, value);\n } else if (isNanValue(value)) {\n if (op !== Operator.EQUAL) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. NaN supports only equality comparisons.'\n );\n }\n return new FieldFilter(field, op, value);\n } else if (op === Operator.ARRAY_CONTAINS) {\n return new ArrayContainsFilter(field, value);\n } else if (op === Operator.IN) {\n debugAssert(\n isArray(value),\n 'IN filter has invalid value: ' + value.toString()\n );\n return new InFilter(field, value);\n } else if (op === Operator.ARRAY_CONTAINS_ANY) {\n debugAssert(\n isArray(value),\n 'ARRAY_CONTAINS_ANY filter has invalid value: ' + value.toString()\n );\n return new ArrayContainsAnyFilter(field, value);\n } else {\n return new FieldFilter(field, op, value);\n }\n }\n\n matches(doc: Document): boolean {\n const other = doc.field(this.field);\n\n // Only compare types with matching backend order (such as double and int).\n return (\n other !== null &&\n typeOrder(this.value) === typeOrder(other) &&\n this.matchesComparison(valueCompare(other, this.value))\n );\n }\n\n protected matchesComparison(comparison: number): boolean {\n switch (this.op) {\n case Operator.LESS_THAN:\n return comparison < 0;\n case Operator.LESS_THAN_OR_EQUAL:\n return comparison <= 0;\n case Operator.EQUAL:\n return comparison === 0;\n case Operator.GREATER_THAN:\n return comparison > 0;\n case Operator.GREATER_THAN_OR_EQUAL:\n return comparison >= 0;\n default:\n return fail('Unknown FieldFilter operator: ' + this.op);\n }\n }\n\n isInequality(): boolean {\n return (\n [\n Operator.LESS_THAN,\n Operator.LESS_THAN_OR_EQUAL,\n Operator.GREATER_THAN,\n Operator.GREATER_THAN_OR_EQUAL\n ].indexOf(this.op) >= 0\n );\n }\n\n canonicalId(): string {\n // TODO(b/29183165): Technically, this won't be unique if two values have\n // the same description, such as the int 3 and the string \"3\". So we should\n // add the types in here somehow, too.\n return (\n this.field.canonicalString() +\n this.op.toString() +\n canonicalId(this.value)\n );\n }\n\n isEqual(other: Filter): boolean {\n if (other instanceof FieldFilter) {\n return (\n this.op === other.op &&\n this.field.isEqual(other.field) &&\n valueEquals(this.value, other.value)\n );\n } else {\n return false;\n }\n }\n\n toString(): string {\n return `${this.field.canonicalString()} ${this.op} ${canonicalId(\n this.value\n )}`;\n }\n}\n\n/** Filter that matches on key fields (i.e. '__name__'). */\nexport class KeyFieldFilter extends FieldFilter {\n private readonly key: DocumentKey;\n\n constructor(field: FieldPath, op: Operator, value: api.Value) {\n super(field, op, value);\n debugAssert(\n isReferenceValue(value),\n 'KeyFieldFilter expects a ReferenceValue'\n );\n this.key = DocumentKey.fromName(value.referenceValue);\n }\n\n matches(doc: Document): boolean {\n const comparison = DocumentKey.comparator(doc.key, this.key);\n return this.matchesComparison(comparison);\n }\n}\n\n/** Filter that matches on key fields within an array. 
*/\nexport class KeyFieldInFilter extends FieldFilter {\n private readonly keys: DocumentKey[];\n\n constructor(field: FieldPath, value: api.Value) {\n super(field, Operator.IN, value);\n debugAssert(isArray(value), 'KeyFieldInFilter expects an ArrayValue');\n this.keys = (value.arrayValue.values || []).map(v => {\n debugAssert(\n isReferenceValue(v),\n 'Comparing on key with IN, but an array value was not a ReferenceValue'\n );\n return DocumentKey.fromName(v.referenceValue);\n });\n }\n\n matches(doc: Document): boolean {\n return this.keys.some(key => key.isEqual(doc.key));\n }\n}\n\n/** A Filter that implements the array-contains operator. */\nexport class ArrayContainsFilter extends FieldFilter {\n constructor(field: FieldPath, value: api.Value) {\n super(field, Operator.ARRAY_CONTAINS, value);\n }\n\n matches(doc: Document): boolean {\n const other = doc.field(this.field);\n return isArray(other) && arrayValueContains(other.arrayValue, this.value);\n }\n}\n\n/** A Filter that implements the IN operator. */\nexport class InFilter extends FieldFilter {\n constructor(field: FieldPath, value: api.Value) {\n super(field, Operator.IN, value);\n debugAssert(isArray(value), 'InFilter expects an ArrayValue');\n }\n\n matches(doc: Document): boolean {\n const other = doc.field(this.field);\n return other !== null && arrayValueContains(this.value.arrayValue!, other);\n }\n}\n\n/** A Filter that implements the array-contains-any operator. */\nexport class ArrayContainsAnyFilter extends FieldFilter {\n constructor(field: FieldPath, value: api.Value) {\n super(field, Operator.ARRAY_CONTAINS_ANY, value);\n debugAssert(isArray(value), 'ArrayContainsAnyFilter expects an ArrayValue');\n }\n\n matches(doc: Document): boolean {\n const other = doc.field(this.field);\n if (!isArray(other) || !other.arrayValue.values) {\n return false;\n }\n return other.arrayValue.values.some(val =>\n arrayValueContains(this.value.arrayValue!, val)\n );\n }\n}\n\n/**\n * The direction of sorting in an order by.\n */\nexport const enum Direction {\n ASCENDING = 'asc',\n DESCENDING = 'desc'\n}\n\n/**\n * Represents a bound of a query.\n *\n * The bound is specified with the given components representing a position and\n * whether it's just before or just after the position (relative to whatever the\n * query order is).\n *\n * The position represents a logical index position for a query. It's a prefix\n * of values for the (potentially implicit) order by clauses of a query.\n *\n * Bound provides a function to determine whether a document comes before or\n * after a bound. This is influenced by whether the position is just before or\n * just after the provided values.\n */\nexport class Bound {\n constructor(readonly position: api.Value[], readonly before: boolean) {}\n\n canonicalId(): string {\n // TODO(b/29183165): Make this collision robust.\n return `${this.before ? 
'b' : 'a'}:${this.position\n .map(p => canonicalId(p))\n .join(',')}`;\n }\n\n /**\n * Returns true if a document sorts before a bound using the provided sort\n * order.\n */\n sortsBeforeDocument(orderBy: OrderBy[], doc: Document): boolean {\n debugAssert(\n this.position.length <= orderBy.length,\n \"Bound has more components than query's orderBy\"\n );\n let comparison = 0;\n for (let i = 0; i < this.position.length; i++) {\n const orderByComponent = orderBy[i];\n const component = this.position[i];\n if (orderByComponent.field.isKeyField()) {\n debugAssert(\n isReferenceValue(component),\n 'Bound has a non-key value where the key path is being used.'\n );\n comparison = DocumentKey.comparator(\n DocumentKey.fromName(component.referenceValue),\n doc.key\n );\n } else {\n const docValue = doc.field(orderByComponent.field);\n debugAssert(\n docValue !== null,\n 'Field should exist since document matched the orderBy already.'\n );\n comparison = valueCompare(component, docValue);\n }\n if (orderByComponent.dir === Direction.DESCENDING) {\n comparison = comparison * -1;\n }\n if (comparison !== 0) {\n break;\n }\n }\n return this.before ? comparison <= 0 : comparison < 0;\n }\n\n isEqual(other: Bound | null): boolean {\n if (other === null) {\n return false;\n }\n if (\n this.before !== other.before ||\n this.position.length !== other.position.length\n ) {\n return false;\n }\n for (let i = 0; i < this.position.length; i++) {\n const thisPosition = this.position[i];\n const otherPosition = other.position[i];\n if (!valueEquals(thisPosition, otherPosition)) {\n return false;\n }\n }\n return true;\n }\n}\n\n/**\n * An ordering on a field, in some Direction. Direction defaults to ASCENDING.\n */\nexport class OrderBy {\n readonly dir: Direction;\n private readonly isKeyOrderBy: boolean;\n\n constructor(readonly field: FieldPath, dir?: Direction) {\n if (dir === undefined) {\n dir = Direction.ASCENDING;\n }\n this.dir = dir;\n this.isKeyOrderBy = field.isKeyField();\n }\n\n compare(d1: Document, d2: Document): number {\n const comparison = this.isKeyOrderBy\n ? 
DocumentKey.comparator(d1.key, d2.key)\n : compareDocumentsByField(this.field, d1, d2);\n switch (this.dir) {\n case Direction.ASCENDING:\n return comparison;\n case Direction.DESCENDING:\n return -1 * comparison;\n default:\n return fail('Unknown direction: ' + this.dir);\n }\n }\n\n canonicalId(): string {\n // TODO(b/29183165): Make this collision robust.\n return this.field.canonicalString() + this.dir.toString();\n }\n\n toString(): string {\n return `${this.field.canonicalString()} (${this.dir})`;\n }\n\n isEqual(other: OrderBy): boolean {\n return this.dir === other.dir && this.field.isEqual(other.field);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { Target } from '../core/target';\nimport { ListenSequenceNumber, TargetId } from '../core/types';\nimport { ByteString } from '../util/byte_string';\n\n/** An enumeration of the different purposes we have for targets. */\nexport const enum TargetPurpose {\n /** A regular, normal query target. */\n Listen,\n\n /**\n * The query target was used to refill a query after an existence filter mismatch.\n */\n ExistenceFilterMismatch,\n\n /** The query target was used to resolve a limbo document. */\n LimboResolution\n}\n\n/**\n * An immutable set of metadata that the local store tracks for each target.\n */\nexport class TargetData {\n constructor(\n /** The target being listened to. */\n readonly target: Target,\n /**\n * The target ID to which the target corresponds; Assigned by the\n * LocalStore for user listens and by the SyncEngine for limbo watches.\n */\n readonly targetId: TargetId,\n /** The purpose of the target. */\n readonly purpose: TargetPurpose,\n /**\n * The sequence number of the last transaction during which this target data\n * was modified.\n */\n readonly sequenceNumber: ListenSequenceNumber,\n /** The latest snapshot version seen for this target. */\n readonly snapshotVersion: SnapshotVersion = SnapshotVersion.min(),\n /**\n * The maximum snapshot version at which the associated view\n * contained no limbo documents.\n */\n readonly lastLimboFreeSnapshotVersion: SnapshotVersion = SnapshotVersion.min(),\n /**\n * An opaque, server-assigned token that allows watching a target to be\n * resumed after disconnecting without retransmitting all the data that\n * matches the target. The resume token essentially identifies a point in\n * time from which the server should resume sending results.\n */\n readonly resumeToken: ByteString = ByteString.EMPTY_BYTE_STRING\n ) {}\n\n /** Creates a new target data instance with an updated sequence number. 
*/\n withSequenceNumber(sequenceNumber: number): TargetData {\n return new TargetData(\n this.target,\n this.targetId,\n this.purpose,\n sequenceNumber,\n this.snapshotVersion,\n this.lastLimboFreeSnapshotVersion,\n this.resumeToken\n );\n }\n\n /**\n * Creates a new target data instance with an updated resume token and\n * snapshot version.\n */\n withResumeToken(\n resumeToken: ByteString,\n snapshotVersion: SnapshotVersion\n ): TargetData {\n return new TargetData(\n this.target,\n this.targetId,\n this.purpose,\n this.sequenceNumber,\n snapshotVersion,\n this.lastLimboFreeSnapshotVersion,\n resumeToken\n );\n }\n\n /**\n * Creates a new target data instance with an updated last limbo free\n * snapshot version number.\n */\n withLastLimboFreeSnapshotVersion(\n lastLimboFreeSnapshotVersion: SnapshotVersion\n ): TargetData {\n return new TargetData(\n this.target,\n this.targetId,\n this.purpose,\n this.sequenceNumber,\n this.snapshotVersion,\n lastLimboFreeSnapshotVersion,\n this.resumeToken\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport class ExistenceFilter {\n // TODO(b/33078163): just use simplest form of existence filter for now\n constructor(public count: number) {}\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert, fail } from './assert';\n\n/*\n * Implementation of an immutable SortedMap using a Left-leaning\n * Red-Black Tree, adapted from the implementation in Mugs\n * (http://mads379.github.com/mugs/) by Mads Hartmann Jensen\n * (mads379@gmail.com).\n *\n * Original paper on Left-leaning Red-Black Trees:\n * http://www.cs.princeton.edu/~rs/talks/LLRB/LLRB.pdf\n *\n * Invariant 1: No red node has a red child\n * Invariant 2: Every leaf path has the same number of black nodes\n * Invariant 3: Only the left child can be red (left leaning)\n */\n\nexport type Comparator<K> = (key1: K, key2: K) => number;\n\nexport interface Entry<K, V> {\n key: K;\n value: V;\n}\n\n// An immutable sorted map implementation, based on a Left-leaning Red-Black\n// tree.\nexport class SortedMap<K, V> {\n // visible for testing\n root: LLRBNode<K, V> | LLRBEmptyNode<K, V>;\n\n constructor(\n public comparator: Comparator<K>,\n root?: LLRBNode<K, V> | LLRBEmptyNode<K, V>\n ) {\n this.root = root ? 
root : LLRBNode.EMPTY;\n }\n\n // Returns a copy of the map, with the specified key/value added or replaced.\n insert(key: K, value: V): SortedMap<K, V> {\n return new SortedMap<K, V>(\n this.comparator,\n this.root\n .insert(key, value, this.comparator)\n .copy(null, null, LLRBNode.BLACK, null, null)\n );\n }\n\n // Returns a copy of the map, with the specified key removed.\n remove(key: K): SortedMap<K, V> {\n return new SortedMap<K, V>(\n this.comparator,\n this.root\n .remove(key, this.comparator)\n .copy(null, null, LLRBNode.BLACK, null, null)\n );\n }\n\n // Returns the value of the node with the given key, or null.\n get(key: K): V | null {\n let node = this.root;\n while (!node.isEmpty()) {\n const cmp = this.comparator(key, node.key);\n if (cmp === 0) {\n return node.value;\n } else if (cmp < 0) {\n node = node.left;\n } else if (cmp > 0) {\n node = node.right;\n }\n }\n return null;\n }\n\n // Returns the index of the element in this sorted map, or -1 if it doesn't\n // exist.\n indexOf(key: K): number {\n // Number of nodes that were pruned when descending right\n let prunedNodes = 0;\n let node = this.root;\n while (!node.isEmpty()) {\n const cmp = this.comparator(key, node.key);\n if (cmp === 0) {\n return prunedNodes + node.left.size;\n } else if (cmp < 0) {\n node = node.left;\n } else {\n // Count all nodes left of the node plus the node itself\n prunedNodes += node.left.size + 1;\n node = node.right;\n }\n }\n // Node not found\n return -1;\n }\n\n isEmpty(): boolean {\n return this.root.isEmpty();\n }\n\n // Returns the total number of nodes in the map.\n get size(): number {\n return this.root.size;\n }\n\n // Returns the minimum key in the map.\n minKey(): K | null {\n return this.root.minKey();\n }\n\n // Returns the maximum key in the map.\n maxKey(): K | null {\n return this.root.maxKey();\n }\n\n // Traverses the map in key order and calls the specified action function\n // for each key/value pair. If action returns true, traversal is aborted.\n // Returns the first truthy value returned by action, or the last falsey\n // value returned by action.\n inorderTraversal<T>(action: (k: K, v: V) => T): T {\n return (this.root as LLRBNode<K, V>).inorderTraversal(action);\n }\n\n forEach(fn: (k: K, v: V) => void): void {\n this.inorderTraversal((k, v) => {\n fn(k, v);\n return false;\n });\n }\n\n toString(): string {\n const descriptions: string[] = [];\n this.inorderTraversal((k, v) => {\n descriptions.push(`${k}:${v}`);\n return false;\n });\n return `{${descriptions.join(', ')}}`;\n }\n\n // Traverses the map in reverse key order and calls the specified action\n // function for each key/value pair. 
If action returns true, traversal is\n // aborted.\n // Returns the first truthy value returned by action, or the last falsey\n // value returned by action.\n reverseTraversal<T>(action: (k: K, v: V) => T): T {\n return (this.root as LLRBNode<K, V>).reverseTraversal(action);\n }\n\n // Returns an iterator over the SortedMap.\n getIterator(): SortedMapIterator<K, V> {\n return new SortedMapIterator<K, V>(this.root, null, this.comparator, false);\n }\n\n getIteratorFrom(key: K): SortedMapIterator<K, V> {\n return new SortedMapIterator<K, V>(this.root, key, this.comparator, false);\n }\n\n getReverseIterator(): SortedMapIterator<K, V> {\n return new SortedMapIterator<K, V>(this.root, null, this.comparator, true);\n }\n\n getReverseIteratorFrom(key: K): SortedMapIterator<K, V> {\n return new SortedMapIterator<K, V>(this.root, key, this.comparator, true);\n }\n} // end SortedMap\n\n// An iterator over an LLRBNode.\nexport class SortedMapIterator<K, V> {\n private isReverse: boolean;\n private nodeStack: Array<LLRBNode<K, V> | LLRBEmptyNode<K, V>>;\n\n constructor(\n node: LLRBNode<K, V> | LLRBEmptyNode<K, V>,\n startKey: K | null,\n comparator: Comparator<K>,\n isReverse: boolean\n ) {\n this.isReverse = isReverse;\n this.nodeStack = [];\n\n let cmp = 1;\n while (!node.isEmpty()) {\n cmp = startKey ? comparator(node.key, startKey) : 1;\n // flip the comparison if we're going in reverse\n if (isReverse) {\n cmp *= -1;\n }\n\n if (cmp < 0) {\n // This node is less than our start key. ignore it\n if (this.isReverse) {\n node = node.left;\n } else {\n node = node.right;\n }\n } else if (cmp === 0) {\n // This node is exactly equal to our start key. Push it on the stack,\n // but stop iterating;\n this.nodeStack.push(node);\n break;\n } else {\n // This node is greater than our start key, add it to the stack and move\n // to the next one\n this.nodeStack.push(node);\n if (this.isReverse) {\n node = node.right;\n } else {\n node = node.left;\n }\n }\n }\n }\n\n getNext(): Entry<K, V> {\n debugAssert(\n this.nodeStack.length > 0,\n 'getNext() called on iterator when hasNext() is false.'\n );\n\n let node = this.nodeStack.pop()!;\n const result = { key: node.key, value: node.value };\n\n if (this.isReverse) {\n node = node.left;\n while (!node.isEmpty()) {\n this.nodeStack.push(node);\n node = node.right;\n }\n } else {\n node = node.right;\n while (!node.isEmpty()) {\n this.nodeStack.push(node);\n node = node.left;\n }\n }\n\n return result;\n }\n\n hasNext(): boolean {\n return this.nodeStack.length > 0;\n }\n\n peek(): Entry<K, V> | null {\n if (this.nodeStack.length === 0) {\n return null;\n }\n\n const node = this.nodeStack[this.nodeStack.length - 1];\n return { key: node.key, value: node.value };\n }\n} // end SortedMapIterator\n\n// Represents a node in a Left-leaning Red-Black tree.\nexport class LLRBNode<K, V> {\n readonly color: boolean;\n readonly left: LLRBNode<K, V> | LLRBEmptyNode<K, V>;\n readonly right: LLRBNode<K, V> | LLRBEmptyNode<K, V>;\n readonly size: number;\n\n // Empty node is shared between all LLRB trees.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n static EMPTY: LLRBEmptyNode<any, any> = null as any;\n\n static RED = true;\n static BLACK = false;\n\n constructor(\n public key: K,\n public value: V,\n color?: boolean,\n left?: LLRBNode<K, V> | LLRBEmptyNode<K, V>,\n right?: LLRBNode<K, V> | LLRBEmptyNode<K, V>\n ) {\n this.color = color != null ? color : LLRBNode.RED;\n this.left = left != null ? left : LLRBNode.EMPTY;\n this.right = right != null ? 
right : LLRBNode.EMPTY;\n this.size = this.left.size + 1 + this.right.size;\n }\n\n // Returns a copy of the current node, optionally replacing pieces of it.\n copy(\n key: K | null,\n value: V | null,\n color: boolean | null,\n left: LLRBNode<K, V> | LLRBEmptyNode<K, V> | null,\n right: LLRBNode<K, V> | LLRBEmptyNode<K, V> | null\n ): LLRBNode<K, V> {\n return new LLRBNode<K, V>(\n key != null ? key : this.key,\n value != null ? value : this.value,\n color != null ? color : this.color,\n left != null ? left : this.left,\n right != null ? right : this.right\n );\n }\n\n isEmpty(): boolean {\n return false;\n }\n\n // Traverses the tree in key order and calls the specified action function\n // for each node. If action returns true, traversal is aborted.\n // Returns the first truthy value returned by action, or the last falsey\n // value returned by action.\n inorderTraversal<T>(action: (k: K, v: V) => T): T {\n return (\n (this.left as LLRBNode<K, V>).inorderTraversal(action) ||\n action(this.key, this.value) ||\n (this.right as LLRBNode<K, V>).inorderTraversal(action)\n );\n }\n\n // Traverses the tree in reverse key order and calls the specified action\n // function for each node. If action returns true, traversal is aborted.\n // Returns the first truthy value returned by action, or the last falsey\n // value returned by action.\n reverseTraversal<T>(action: (k: K, v: V) => T): T {\n return (\n (this.right as LLRBNode<K, V>).reverseTraversal(action) ||\n action(this.key, this.value) ||\n (this.left as LLRBNode<K, V>).reverseTraversal(action)\n );\n }\n\n // Returns the minimum node in the tree.\n private min(): LLRBNode<K, V> {\n if (this.left.isEmpty()) {\n return this;\n } else {\n return (this.left as LLRBNode<K, V>).min();\n }\n }\n\n // Returns the maximum key in the tree.\n minKey(): K | null {\n return this.min().key;\n }\n\n // Returns the maximum key in the tree.\n maxKey(): K | null {\n if (this.right.isEmpty()) {\n return this.key;\n } else {\n return this.right.maxKey();\n }\n }\n\n // Returns new tree, with the key/value added.\n insert(key: K, value: V, comparator: Comparator<K>): LLRBNode<K, V> {\n let n: LLRBNode<K, V> = this;\n const cmp = comparator(key, n.key);\n if (cmp < 0) {\n n = n.copy(null, null, null, n.left.insert(key, value, comparator), null);\n } else if (cmp === 0) {\n n = n.copy(null, value, null, null, null);\n } else {\n n = n.copy(\n null,\n null,\n null,\n null,\n n.right.insert(key, value, comparator)\n );\n }\n return n.fixUp();\n }\n\n private removeMin(): LLRBNode<K, V> | LLRBEmptyNode<K, V> {\n if (this.left.isEmpty()) {\n return LLRBNode.EMPTY;\n }\n let n: LLRBNode<K, V> = this;\n if (!n.left.isRed() && !n.left.left.isRed()) {\n n = n.moveRedLeft();\n }\n n = n.copy(null, null, null, (n.left as LLRBNode<K, V>).removeMin(), null);\n return n.fixUp();\n }\n\n // Returns new tree, with the specified item removed.\n remove(\n key: K,\n comparator: Comparator<K>\n ): LLRBNode<K, V> | LLRBEmptyNode<K, V> {\n let smallest: LLRBNode<K, V>;\n let n: LLRBNode<K, V> = this;\n if (comparator(key, n.key) < 0) {\n if (!n.left.isEmpty() && !n.left.isRed() && !n.left.left.isRed()) {\n n = n.moveRedLeft();\n }\n n = n.copy(null, null, null, n.left.remove(key, comparator), null);\n } else {\n if (n.left.isRed()) {\n n = n.rotateRight();\n }\n if (!n.right.isEmpty() && !n.right.isRed() && !n.right.left.isRed()) {\n n = n.moveRedRight();\n }\n if (comparator(key, n.key) === 0) {\n if (n.right.isEmpty()) {\n return LLRBNode.EMPTY;\n } else {\n smallest = (n.right 
as LLRBNode<K, V>).min();\n n = n.copy(\n smallest.key,\n smallest.value,\n null,\n null,\n (n.right as LLRBNode<K, V>).removeMin()\n );\n }\n }\n n = n.copy(null, null, null, null, n.right.remove(key, comparator));\n }\n return n.fixUp();\n }\n\n isRed(): boolean {\n return this.color;\n }\n\n // Returns new tree after performing any needed rotations.\n private fixUp(): LLRBNode<K, V> {\n let n: LLRBNode<K, V> = this;\n if (n.right.isRed() && !n.left.isRed()) {\n n = n.rotateLeft();\n }\n if (n.left.isRed() && n.left.left.isRed()) {\n n = n.rotateRight();\n }\n if (n.left.isRed() && n.right.isRed()) {\n n = n.colorFlip();\n }\n return n;\n }\n\n private moveRedLeft(): LLRBNode<K, V> {\n let n = this.colorFlip();\n if (n.right.left.isRed()) {\n n = n.copy(\n null,\n null,\n null,\n null,\n (n.right as LLRBNode<K, V>).rotateRight()\n );\n n = n.rotateLeft();\n n = n.colorFlip();\n }\n return n;\n }\n\n private moveRedRight(): LLRBNode<K, V> {\n let n = this.colorFlip();\n if (n.left.left.isRed()) {\n n = n.rotateRight();\n n = n.colorFlip();\n }\n return n;\n }\n\n private rotateLeft(): LLRBNode<K, V> {\n const nl = this.copy(null, null, LLRBNode.RED, null, this.right.left);\n return (this.right as LLRBNode<K, V>).copy(\n null,\n null,\n this.color,\n nl,\n null\n );\n }\n\n private rotateRight(): LLRBNode<K, V> {\n const nr = this.copy(null, null, LLRBNode.RED, this.left.right, null);\n return (this.left as LLRBNode<K, V>).copy(null, null, this.color, null, nr);\n }\n\n private colorFlip(): LLRBNode<K, V> {\n const left = this.left.copy(null, null, !this.left.color, null, null);\n const right = this.right.copy(null, null, !this.right.color, null, null);\n return this.copy(null, null, !this.color, left, right);\n }\n\n // For testing.\n checkMaxDepth(): boolean {\n const blackDepth = this.check();\n if (Math.pow(2.0, blackDepth) <= this.size + 1) {\n return true;\n } else {\n return false;\n }\n }\n\n // In a balanced RB tree, the black-depth (number of black nodes) from root to\n // leaves is equal on both sides. This function verifies that or asserts.\n protected check(): number {\n if (this.isRed() && this.left.isRed()) {\n throw fail('Red node has red child(' + this.key + ',' + this.value + ')');\n }\n if (this.right.isRed()) {\n throw fail('Right child of (' + this.key + ',' + this.value + ') is red');\n }\n const blackDepth = (this.left as LLRBNode<K, V>).check();\n if (blackDepth !== (this.right as LLRBNode<K, V>).check()) {\n throw fail('Black depths differ');\n } else {\n return blackDepth + (this.isRed() ? 
0 : 1);\n }\n }\n} // end LLRBNode\n\n// Represents an empty node (a leaf node in the Red-Black Tree).\nexport class LLRBEmptyNode<K, V> {\n get key(): never {\n throw fail('LLRBEmptyNode has no key.');\n }\n get value(): never {\n throw fail('LLRBEmptyNode has no value.');\n }\n get color(): never {\n throw fail('LLRBEmptyNode has no color.');\n }\n get left(): never {\n throw fail('LLRBEmptyNode has no left child.');\n }\n get right(): never {\n throw fail('LLRBEmptyNode has no right child.');\n }\n size = 0;\n\n // Returns a copy of the current node.\n copy(\n key: K | null,\n value: V | null,\n color: boolean | null,\n left: LLRBNode<K, V> | LLRBEmptyNode<K, V> | null,\n right: LLRBNode<K, V> | LLRBEmptyNode<K, V> | null\n ): LLRBEmptyNode<K, V> {\n return this;\n }\n\n // Returns a copy of the tree, with the specified key/value added.\n insert(key: K, value: V, comparator: Comparator<K>): LLRBNode<K, V> {\n return new LLRBNode<K, V>(key, value);\n }\n\n // Returns a copy of the tree, with the specified key removed.\n remove(key: K, comparator: Comparator<K>): LLRBEmptyNode<K, V> {\n return this;\n }\n\n isEmpty(): boolean {\n return true;\n }\n\n inorderTraversal(action: (k: K, v: V) => boolean): boolean {\n return false;\n }\n\n reverseTraversal(action: (k: K, v: V) => boolean): boolean {\n return false;\n }\n\n minKey(): K | null {\n return null;\n }\n\n maxKey(): K | null {\n return null;\n }\n\n isRed(): boolean {\n return false;\n }\n\n // For testing.\n checkMaxDepth(): boolean {\n return true;\n }\n\n protected check(): 0 {\n return 0;\n }\n} // end LLRBEmptyNode\n\nLLRBNode.EMPTY = new LLRBEmptyNode<unknown, unknown>();\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SortedMap, SortedMapIterator } from './sorted_map';\n\n/**\n * SortedSet is an immutable (copy-on-write) collection that holds elements\n * in order specified by the provided comparator.\n *\n * NOTE: if provided comparator returns 0 for two elements, we consider them to\n * be equal!\n */\nexport class SortedSet<T> {\n private data: SortedMap<T, boolean>;\n\n constructor(private comparator: (left: T, right: T) => number) {\n this.data = new SortedMap<T, boolean>(this.comparator);\n }\n\n has(elem: T): boolean {\n return this.data.get(elem) !== null;\n }\n\n first(): T | null {\n return this.data.minKey();\n }\n\n last(): T | null {\n return this.data.maxKey();\n }\n\n get size(): number {\n return this.data.size;\n }\n\n indexOf(elem: T): number {\n return this.data.indexOf(elem);\n }\n\n /** Iterates elements in order defined by \"comparator\" */\n forEach(cb: (elem: T) => void): void {\n this.data.inorderTraversal((k: T, v: boolean) => {\n cb(k);\n return false;\n });\n }\n\n /** Iterates over `elem`s such that: range[0] <= elem < range[1]. 
*/\n forEachInRange(range: [T, T], cb: (elem: T) => void): void {\n const iter = this.data.getIteratorFrom(range[0]);\n while (iter.hasNext()) {\n const elem = iter.getNext();\n if (this.comparator(elem.key, range[1]) >= 0) {\n return;\n }\n cb(elem.key);\n }\n }\n\n /**\n * Iterates over `elem`s such that: start <= elem until false is returned.\n */\n forEachWhile(cb: (elem: T) => boolean, start?: T): void {\n let iter: SortedMapIterator<T, boolean>;\n if (start !== undefined) {\n iter = this.data.getIteratorFrom(start);\n } else {\n iter = this.data.getIterator();\n }\n while (iter.hasNext()) {\n const elem = iter.getNext();\n const result = cb(elem.key);\n if (!result) {\n return;\n }\n }\n }\n\n /** Finds the least element greater than or equal to `elem`. */\n firstAfterOrEqual(elem: T): T | null {\n const iter = this.data.getIteratorFrom(elem);\n return iter.hasNext() ? iter.getNext().key : null;\n }\n\n getIterator(): SortedSetIterator<T> {\n return new SortedSetIterator<T>(this.data.getIterator());\n }\n\n getIteratorFrom(key: T): SortedSetIterator<T> {\n return new SortedSetIterator<T>(this.data.getIteratorFrom(key));\n }\n\n /** Inserts or updates an element */\n add(elem: T): SortedSet<T> {\n return this.copy(this.data.remove(elem).insert(elem, true));\n }\n\n /** Deletes an element */\n delete(elem: T): SortedSet<T> {\n if (!this.has(elem)) {\n return this;\n }\n return this.copy(this.data.remove(elem));\n }\n\n isEmpty(): boolean {\n return this.data.isEmpty();\n }\n\n unionWith(other: SortedSet<T>): SortedSet<T> {\n let result: SortedSet<T> = this;\n\n // Make sure `result` always refers to the larger one of the two sets.\n if (result.size < other.size) {\n result = other;\n other = this;\n }\n\n other.forEach(elem => {\n result = result.add(elem);\n });\n return result;\n }\n\n isEqual(other: SortedSet<T>): boolean {\n if (!(other instanceof SortedSet)) {\n return false;\n }\n if (this.size !== other.size) {\n return false;\n }\n\n const thisIt = this.data.getIterator();\n const otherIt = other.data.getIterator();\n while (thisIt.hasNext()) {\n const thisElem = thisIt.getNext().key;\n const otherElem = otherIt.getNext().key;\n if (this.comparator(thisElem, otherElem) !== 0) {\n return false;\n }\n }\n return true;\n }\n\n toArray(): T[] {\n const res: T[] = [];\n this.forEach(targetId => {\n res.push(targetId);\n });\n return res;\n }\n\n toString(): string {\n const result: T[] = [];\n this.forEach(elem => result.push(elem));\n return 'SortedSet(' + result.toString() + ')';\n }\n\n private copy(data: SortedMap<T, boolean>): SortedSet<T> {\n const result = new SortedSet(this.comparator);\n result.data = data;\n return result;\n }\n}\n\nexport class SortedSetIterator<T> {\n constructor(private iter: SortedMapIterator<T, boolean>) {}\n\n getNext(): T {\n return this.iter.getNext().key;\n }\n\n hasNext(): boolean {\n return this.iter.hasNext();\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { 
SnapshotVersion } from '../core/snapshot_version';\nimport { SortedMap } from '../util/sorted_map';\nimport { SortedSet } from '../util/sorted_set';\n\nimport { TargetId } from '../core/types';\nimport { primitiveComparator } from '../util/misc';\nimport { Document, MaybeDocument } from './document';\nimport { DocumentKey } from './document_key';\n\n/** Miscellaneous collection types / constants. */\nexport interface DocumentSizeEntry {\n maybeDocument: MaybeDocument;\n size: number;\n}\n\nexport type MaybeDocumentMap = SortedMap<DocumentKey, MaybeDocument>;\nconst EMPTY_MAYBE_DOCUMENT_MAP = new SortedMap<DocumentKey, MaybeDocument>(\n DocumentKey.comparator\n);\nexport function maybeDocumentMap(): MaybeDocumentMap {\n return EMPTY_MAYBE_DOCUMENT_MAP;\n}\n\nexport type NullableMaybeDocumentMap = SortedMap<\n DocumentKey,\n MaybeDocument | null\n>;\n\nexport function nullableMaybeDocumentMap(): NullableMaybeDocumentMap {\n return maybeDocumentMap();\n}\n\nexport interface DocumentSizeEntries {\n maybeDocuments: NullableMaybeDocumentMap;\n sizeMap: SortedMap<DocumentKey, number>;\n}\n\nexport type DocumentMap = SortedMap<DocumentKey, Document>;\nconst EMPTY_DOCUMENT_MAP = new SortedMap<DocumentKey, Document>(\n DocumentKey.comparator\n);\nexport function documentMap(): DocumentMap {\n return EMPTY_DOCUMENT_MAP;\n}\n\nexport type DocumentVersionMap = SortedMap<DocumentKey, SnapshotVersion>;\nconst EMPTY_DOCUMENT_VERSION_MAP = new SortedMap<DocumentKey, SnapshotVersion>(\n DocumentKey.comparator\n);\nexport function documentVersionMap(): DocumentVersionMap {\n return EMPTY_DOCUMENT_VERSION_MAP;\n}\n\nexport type DocumentKeySet = SortedSet<DocumentKey>;\nconst EMPTY_DOCUMENT_KEY_SET = new SortedSet(DocumentKey.comparator);\nexport function documentKeySet(...keys: DocumentKey[]): DocumentKeySet {\n let set = EMPTY_DOCUMENT_KEY_SET;\n for (const key of keys) {\n set = set.add(key);\n }\n return set;\n}\n\nexport type TargetIdSet = SortedSet<TargetId>;\nconst EMPTY_TARGET_ID_SET = new SortedSet<TargetId>(primitiveComparator);\nexport function targetIdSet(): SortedSet<TargetId> {\n return EMPTY_TARGET_ID_SET;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SortedMap } from '../util/sorted_map';\n\nimport { documentMap } from './collections';\nimport { Document } from './document';\nimport { DocumentComparator } from './document_comparator';\nimport { DocumentKey } from './document_key';\n\n/**\n * DocumentSet is an immutable (copy-on-write) collection that holds documents\n * in order specified by the provided comparator. 
We always add a document key\n * comparator on top of what is provided to guarantee document equality based on\n * the key.\n */\n\nexport class DocumentSet {\n /**\n * Returns an empty copy of the existing DocumentSet, using the same\n * comparator.\n */\n static emptySet(oldSet: DocumentSet): DocumentSet {\n return new DocumentSet(oldSet.comparator);\n }\n\n private comparator: DocumentComparator;\n private keyedMap: SortedMap<DocumentKey, Document>;\n private sortedSet: SortedMap<Document, null>;\n\n /** The default ordering is by key if the comparator is omitted */\n constructor(comp?: DocumentComparator) {\n // We are adding document key comparator to the end as it's the only\n // guaranteed unique property of a document.\n if (comp) {\n this.comparator = (d1: Document, d2: Document) =>\n comp(d1, d2) || DocumentKey.comparator(d1.key, d2.key);\n } else {\n this.comparator = (d1: Document, d2: Document) =>\n DocumentKey.comparator(d1.key, d2.key);\n }\n\n this.keyedMap = documentMap();\n this.sortedSet = new SortedMap<Document, null>(this.comparator);\n }\n\n has(key: DocumentKey): boolean {\n return this.keyedMap.get(key) != null;\n }\n\n get(key: DocumentKey): Document | null {\n return this.keyedMap.get(key);\n }\n\n first(): Document | null {\n return this.sortedSet.minKey();\n }\n\n last(): Document | null {\n return this.sortedSet.maxKey();\n }\n\n isEmpty(): boolean {\n return this.sortedSet.isEmpty();\n }\n\n /**\n * Returns the index of the provided key in the document set, or -1 if the\n * document key is not present in the set;\n */\n indexOf(key: DocumentKey): number {\n const doc = this.keyedMap.get(key);\n return doc ? this.sortedSet.indexOf(doc) : -1;\n }\n\n get size(): number {\n return this.sortedSet.size;\n }\n\n /** Iterates documents in order defined by \"comparator\" */\n forEach(cb: (doc: Document) => void): void {\n this.sortedSet.inorderTraversal((k, v) => {\n cb(k);\n return false;\n });\n }\n\n /** Inserts or updates a document with the same key */\n add(doc: Document): DocumentSet {\n // First remove the element if we have it.\n const set = this.delete(doc.key);\n return set.copy(\n set.keyedMap.insert(doc.key, doc),\n set.sortedSet.insert(doc, null)\n );\n }\n\n /** Deletes a document with a given key */\n delete(key: DocumentKey): DocumentSet {\n const doc = this.get(key);\n if (!doc) {\n return this;\n }\n\n return this.copy(this.keyedMap.remove(key), this.sortedSet.remove(doc));\n }\n\n isEqual(other: DocumentSet | null | undefined): boolean {\n if (!(other instanceof DocumentSet)) {\n return false;\n }\n if (this.size !== other.size) {\n return false;\n }\n\n const thisIt = this.sortedSet.getIterator();\n const otherIt = other.sortedSet.getIterator();\n while (thisIt.hasNext()) {\n const thisDoc = thisIt.getNext().key;\n const otherDoc = otherIt.getNext().key;\n if (!thisDoc.isEqual(otherDoc)) {\n return false;\n }\n }\n return true;\n }\n\n toString(): string {\n const docStrings: string[] = [];\n this.forEach(doc => {\n docStrings.push(doc.toString());\n });\n if (docStrings.length === 0) {\n return 'DocumentSet ()';\n } else {\n return 'DocumentSet (\\n ' + docStrings.join(' \\n') + '\\n)';\n }\n }\n\n private copy(\n keyedMap: SortedMap<DocumentKey, Document>,\n sortedSet: SortedMap<Document, null>\n ): DocumentSet {\n const newSet = new DocumentSet();\n newSet.comparator = this.comparator;\n newSet.keyedMap = keyedMap;\n newSet.sortedSet = sortedSet;\n return newSet;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under 
the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Document } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { DocumentSet } from '../model/document_set';\nimport { fail } from '../util/assert';\nimport { SortedMap } from '../util/sorted_map';\n\nimport { DocumentKeySet } from '../model/collections';\nimport { Query } from './query';\n\nexport const enum ChangeType {\n Added,\n Removed,\n Modified,\n Metadata\n}\n\nexport interface DocumentViewChange {\n type: ChangeType;\n doc: Document;\n}\n\nexport const enum SyncState {\n Local,\n Synced\n}\n\n/**\n * DocumentChangeSet keeps track of a set of changes to docs in a query, merging\n * duplicate events for the same doc.\n */\nexport class DocumentChangeSet {\n private changeMap = new SortedMap<DocumentKey, DocumentViewChange>(\n DocumentKey.comparator\n );\n\n track(change: DocumentViewChange): void {\n const key = change.doc.key;\n const oldChange = this.changeMap.get(key);\n if (!oldChange) {\n this.changeMap = this.changeMap.insert(key, change);\n return;\n }\n\n // Merge the new change with the existing change.\n if (\n change.type !== ChangeType.Added &&\n oldChange.type === ChangeType.Metadata\n ) {\n this.changeMap = this.changeMap.insert(key, change);\n } else if (\n change.type === ChangeType.Metadata &&\n oldChange.type !== ChangeType.Removed\n ) {\n this.changeMap = this.changeMap.insert(key, {\n type: oldChange.type,\n doc: change.doc\n });\n } else if (\n change.type === ChangeType.Modified &&\n oldChange.type === ChangeType.Modified\n ) {\n this.changeMap = this.changeMap.insert(key, {\n type: ChangeType.Modified,\n doc: change.doc\n });\n } else if (\n change.type === ChangeType.Modified &&\n oldChange.type === ChangeType.Added\n ) {\n this.changeMap = this.changeMap.insert(key, {\n type: ChangeType.Added,\n doc: change.doc\n });\n } else if (\n change.type === ChangeType.Removed &&\n oldChange.type === ChangeType.Added\n ) {\n this.changeMap = this.changeMap.remove(key);\n } else if (\n change.type === ChangeType.Removed &&\n oldChange.type === ChangeType.Modified\n ) {\n this.changeMap = this.changeMap.insert(key, {\n type: ChangeType.Removed,\n doc: oldChange.doc\n });\n } else if (\n change.type === ChangeType.Added &&\n oldChange.type === ChangeType.Removed\n ) {\n this.changeMap = this.changeMap.insert(key, {\n type: ChangeType.Modified,\n doc: change.doc\n });\n } else {\n // This includes these cases, which don't make sense:\n // Added->Added\n // Removed->Removed\n // Modified->Added\n // Removed->Modified\n // Metadata->Added\n // Removed->Metadata\n fail(\n 'unsupported combination of changes: ' +\n JSON.stringify(change) +\n ' after ' +\n JSON.stringify(oldChange)\n );\n }\n }\n\n getChanges(): DocumentViewChange[] {\n const changes: DocumentViewChange[] = [];\n this.changeMap.inorderTraversal(\n (key: DocumentKey, change: DocumentViewChange) => {\n changes.push(change);\n }\n );\n return changes;\n }\n}\n\nexport class ViewSnapshot {\n constructor(\n readonly query: 
Query,\n readonly docs: DocumentSet,\n readonly oldDocs: DocumentSet,\n readonly docChanges: DocumentViewChange[],\n readonly mutatedKeys: DocumentKeySet,\n readonly fromCache: boolean,\n readonly syncStateChanged: boolean,\n readonly excludesMetadataChanges: boolean\n ) {}\n\n /** Returns a view snapshot as if all documents in the snapshot were added. */\n static fromInitialDocuments(\n query: Query,\n documents: DocumentSet,\n mutatedKeys: DocumentKeySet,\n fromCache: boolean\n ): ViewSnapshot {\n const changes: DocumentViewChange[] = [];\n documents.forEach(doc => {\n changes.push({ type: ChangeType.Added, doc });\n });\n\n return new ViewSnapshot(\n query,\n documents,\n DocumentSet.emptySet(documents),\n changes,\n mutatedKeys,\n fromCache,\n /* syncStateChanged= */ true,\n /* excludesMetadataChanges= */ false\n );\n }\n\n get hasPendingWrites(): boolean {\n return !this.mutatedKeys.isEmpty();\n }\n\n isEqual(other: ViewSnapshot): boolean {\n if (\n this.fromCache !== other.fromCache ||\n this.syncStateChanged !== other.syncStateChanged ||\n !this.mutatedKeys.isEqual(other.mutatedKeys) ||\n !this.query.isEqual(other.query) ||\n !this.docs.isEqual(other.docs) ||\n !this.oldDocs.isEqual(other.oldDocs)\n ) {\n return false;\n }\n const changes: DocumentViewChange[] = this.docChanges;\n const otherChanges: DocumentViewChange[] = other.docChanges;\n if (changes.length !== otherChanges.length) {\n return false;\n }\n for (let i = 0; i < changes.length; i++) {\n if (\n changes[i].type !== otherChanges[i].type ||\n !changes[i].doc.isEqual(otherChanges[i].doc)\n ) {\n return false;\n }\n }\n return true;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { TargetId } from '../core/types';\nimport {\n documentKeySet,\n DocumentKeySet,\n maybeDocumentMap,\n MaybeDocumentMap,\n targetIdSet\n} from '../model/collections';\nimport { SortedSet } from '../util/sorted_set';\nimport { ByteString } from '../util/byte_string';\n\n/**\n * An event from the RemoteStore. It is split into targetChanges (changes to the\n * state or the set of documents in our watched targets) and documentUpdates\n * (changes to the actual documents).\n */\nexport class RemoteEvent {\n constructor(\n /**\n * The snapshot version this event brings us up to, or MIN if not set.\n */\n readonly snapshotVersion: SnapshotVersion,\n /**\n * A map from target to changes to the target. See TargetChange.\n */\n readonly targetChanges: Map<TargetId, TargetChange>,\n /**\n * A set of targets that is known to be inconsistent. 
Listens for these\n * targets should be re-established without resume tokens.\n */\n readonly targetMismatches: SortedSet<TargetId>,\n /**\n * A set of which documents have changed or been deleted, along with the\n * doc's new values (if not deleted).\n */\n readonly documentUpdates: MaybeDocumentMap,\n /**\n * A set of which document updates are due only to limbo resolution targets.\n */\n readonly resolvedLimboDocuments: DocumentKeySet\n ) {}\n\n /**\n * HACK: Views require RemoteEvents in order to determine whether the view is\n * CURRENT, but secondary tabs don't receive remote events. So this method is\n * used to create a synthesized RemoteEvent that can be used to apply a\n * CURRENT status change to a View, for queries executed in a different tab.\n */\n // PORTING NOTE: Multi-tab only\n static createSynthesizedRemoteEventForCurrentChange(\n targetId: TargetId,\n current: boolean\n ): RemoteEvent {\n const targetChanges = new Map<TargetId, TargetChange>();\n targetChanges.set(\n targetId,\n TargetChange.createSynthesizedTargetChangeForCurrentChange(\n targetId,\n current\n )\n );\n return new RemoteEvent(\n SnapshotVersion.min(),\n targetChanges,\n targetIdSet(),\n maybeDocumentMap(),\n documentKeySet()\n );\n }\n}\n\n/**\n * A TargetChange specifies the set of changes for a specific target as part of\n * a RemoteEvent. These changes track which documents are added, modified or\n * removed, as well as the target's resume token and whether the target is\n * marked CURRENT.\n * The actual changes *to* documents are not part of the TargetChange since\n * documents may be part of multiple targets.\n */\nexport class TargetChange {\n constructor(\n /**\n * An opaque, server-assigned token that allows watching a query to be resumed\n * after disconnecting without retransmitting all the data that matches the\n * query. The resume token essentially identifies a point in time from which\n * the server should resume sending results.\n */\n readonly resumeToken: ByteString,\n /**\n * The \"current\" (synced) status of this target. 
Note that \"current\"\n * has special meaning in the RPC protocol that implies that a target is\n * both up-to-date and consistent with the rest of the watch stream.\n */\n readonly current: boolean,\n /**\n * The set of documents that were newly assigned to this target as part of\n * this remote event.\n */\n readonly addedDocuments: DocumentKeySet,\n /**\n * The set of documents that were already assigned to this target but received\n * an update during this remote event.\n */\n readonly modifiedDocuments: DocumentKeySet,\n /**\n * The set of documents that were removed from this target as part of this\n * remote event.\n */\n readonly removedDocuments: DocumentKeySet\n ) {}\n\n /**\n * This method is used to create a synthesized TargetChanges that can be used to\n * apply a CURRENT status change to a View (for queries executed in a different\n * tab) or for new queries (to raise snapshots with correct CURRENT status).\n */\n static createSynthesizedTargetChangeForCurrentChange(\n targetId: TargetId,\n current: boolean\n ): TargetChange {\n return new TargetChange(\n ByteString.EMPTY_BYTE_STRING,\n current,\n documentKeySet(),\n documentKeySet(),\n documentKeySet()\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { TargetId } from '../core/types';\nimport { ChangeType } from '../core/view_snapshot';\nimport { TargetData, TargetPurpose } from '../local/target_data';\nimport {\n documentKeySet,\n DocumentKeySet,\n maybeDocumentMap\n} from '../model/collections';\nimport { Document, MaybeDocument, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { debugAssert, fail, hardAssert } from '../util/assert';\nimport { FirestoreError } from '../util/error';\nimport { logDebug } from '../util/log';\nimport { primitiveComparator } from '../util/misc';\nimport { SortedMap } from '../util/sorted_map';\nimport { SortedSet } from '../util/sorted_set';\nimport { ExistenceFilter } from './existence_filter';\nimport { RemoteEvent, TargetChange } from './remote_event';\nimport { ByteString } from '../util/byte_string';\n\n/**\n * Internal representation of the watcher API protocol buffers.\n */\nexport type WatchChange =\n | DocumentWatchChange\n | WatchTargetChange\n | ExistenceFilterChange;\n\n/**\n * Represents a changed document and a list of target ids to which this change\n * applies.\n *\n * If document has been deleted NoDocument will be provided.\n */\nexport class DocumentWatchChange {\n constructor(\n /** The new document applies to all of these targets. */\n public updatedTargetIds: TargetId[],\n /** The new document is removed from all of these targets. */\n public removedTargetIds: TargetId[],\n /** The key of the document for this change. */\n public key: DocumentKey,\n /**\n * The new document or NoDocument if it was deleted. 
Is null if the\n * document went out of view without the server sending a new document.\n */\n public newDoc: MaybeDocument | null\n ) {}\n}\n\nexport class ExistenceFilterChange {\n constructor(\n public targetId: TargetId,\n public existenceFilter: ExistenceFilter\n ) {}\n}\n\nexport const enum WatchTargetChangeState {\n NoChange,\n Added,\n Removed,\n Current,\n Reset\n}\n\nexport class WatchTargetChange {\n constructor(\n /** What kind of change occurred to the watch target. */\n public state: WatchTargetChangeState,\n /** The target IDs that were added/removed/set. */\n public targetIds: TargetId[],\n /**\n * An opaque, server-assigned token that allows watching a target to be\n * resumed after disconnecting without retransmitting all the data that\n * matches the target. The resume token essentially identifies a point in\n * time from which the server should resume sending results.\n */\n public resumeToken: ByteString = ByteString.EMPTY_BYTE_STRING,\n /** An RPC error indicating why the watch failed. */\n public cause: FirestoreError | null = null\n ) {}\n}\n\n/** Tracks the internal state of a Watch target. */\nclass TargetState {\n /**\n * The number of pending responses (adds or removes) that we are waiting on.\n * We only consider targets active that have no pending responses.\n */\n private pendingResponses = 0;\n\n /**\n * Keeps track of the document changes since the last raised snapshot.\n *\n * These changes are continuously updated as we receive document updates and\n * always reflect the current set of changes against the last issued snapshot.\n */\n private documentChanges: SortedMap<\n DocumentKey,\n ChangeType\n > = snapshotChangesMap();\n\n /** See public getters for explanations of these fields. */\n private _resumeToken: ByteString = ByteString.EMPTY_BYTE_STRING;\n private _current = false;\n\n /**\n * Whether this target state should be included in the next snapshot. We\n * initialize to true so that newly-added targets are included in the next\n * RemoteEvent.\n */\n private _hasPendingChanges = true;\n\n /**\n * Whether this target has been marked 'current'.\n *\n * 'Current' has special meaning in the RPC protocol: It implies that the\n * Watch backend has sent us all changes up to the point at which the target\n * was added and that the target is consistent with the rest of the watch\n * stream.\n */\n get current(): boolean {\n return this._current;\n }\n\n /** The last resume token sent to us for this target. */\n get resumeToken(): ByteString {\n return this._resumeToken;\n }\n\n /** Whether this target has pending target adds or target removes. */\n get isPending(): boolean {\n return this.pendingResponses !== 0;\n }\n\n /** Whether we have modified any state that should trigger a snapshot. */\n get hasPendingChanges(): boolean {\n return this._hasPendingChanges;\n }\n\n /**\n * Applies the resume token to the TargetChange, but only when it has a new\n * value. 
Empty resumeTokens are discarded.\n */\n updateResumeToken(resumeToken: ByteString): void {\n if (resumeToken.approximateByteSize() > 0) {\n this._hasPendingChanges = true;\n this._resumeToken = resumeToken;\n }\n }\n\n /**\n * Creates a target change from the current set of changes.\n *\n * To reset the document changes after raising this snapshot, call\n * `clearPendingChanges()`.\n */\n toTargetChange(): TargetChange {\n let addedDocuments = documentKeySet();\n let modifiedDocuments = documentKeySet();\n let removedDocuments = documentKeySet();\n\n this.documentChanges.forEach((key, changeType) => {\n switch (changeType) {\n case ChangeType.Added:\n addedDocuments = addedDocuments.add(key);\n break;\n case ChangeType.Modified:\n modifiedDocuments = modifiedDocuments.add(key);\n break;\n case ChangeType.Removed:\n removedDocuments = removedDocuments.add(key);\n break;\n default:\n fail('Encountered invalid change type: ' + changeType);\n }\n });\n\n return new TargetChange(\n this._resumeToken,\n this._current,\n addedDocuments,\n modifiedDocuments,\n removedDocuments\n );\n }\n\n /**\n * Resets the document changes and sets `hasPendingChanges` to false.\n */\n clearPendingChanges(): void {\n this._hasPendingChanges = false;\n this.documentChanges = snapshotChangesMap();\n }\n\n addDocumentChange(key: DocumentKey, changeType: ChangeType): void {\n this._hasPendingChanges = true;\n this.documentChanges = this.documentChanges.insert(key, changeType);\n }\n\n removeDocumentChange(key: DocumentKey): void {\n this._hasPendingChanges = true;\n this.documentChanges = this.documentChanges.remove(key);\n }\n\n recordPendingTargetRequest(): void {\n this.pendingResponses += 1;\n }\n\n recordTargetResponse(): void {\n this.pendingResponses -= 1;\n }\n\n markCurrent(): void {\n this._hasPendingChanges = true;\n this._current = true;\n }\n}\n\n/**\n * Interface implemented by RemoteStore to expose target metadata to the\n * WatchChangeAggregator.\n */\nexport interface TargetMetadataProvider {\n /**\n * Returns the set of remote document keys for the given target ID as of the\n * last raised snapshot.\n */\n getRemoteKeysForTarget(targetId: TargetId): DocumentKeySet;\n\n /**\n * Returns the TargetData for an active target ID or 'null' if this target\n * has become inactive\n */\n getTargetDataForTarget(targetId: TargetId): TargetData | null;\n}\n\nconst LOG_TAG = 'WatchChangeAggregator';\n\n/**\n * A helper class to accumulate watch changes into a RemoteEvent.\n */\nexport class WatchChangeAggregator {\n constructor(private metadataProvider: TargetMetadataProvider) {}\n\n /** The internal state of all tracked targets. */\n private targetStates = new Map<TargetId, TargetState>();\n\n /** Keeps track of the documents to update since the last raised snapshot. */\n private pendingDocumentUpdates = maybeDocumentMap();\n\n /** A mapping of document keys to their set of target IDs. */\n private pendingDocumentTargetMapping = documentTargetMap();\n\n /**\n * A list of targets with existence filter mismatches. 
These targets are\n * known to be inconsistent and their listens needs to be re-established by\n * RemoteStore.\n */\n private pendingTargetResets = new SortedSet<TargetId>(primitiveComparator);\n\n /**\n * Processes and adds the DocumentWatchChange to the current set of changes.\n */\n handleDocumentChange(docChange: DocumentWatchChange): void {\n for (const targetId of docChange.updatedTargetIds) {\n if (docChange.newDoc instanceof Document) {\n this.addDocumentToTarget(targetId, docChange.newDoc);\n } else if (docChange.newDoc instanceof NoDocument) {\n this.removeDocumentFromTarget(\n targetId,\n docChange.key,\n docChange.newDoc\n );\n }\n }\n\n for (const targetId of docChange.removedTargetIds) {\n this.removeDocumentFromTarget(targetId, docChange.key, docChange.newDoc);\n }\n }\n\n /** Processes and adds the WatchTargetChange to the current set of changes. */\n handleTargetChange(targetChange: WatchTargetChange): void {\n this.forEachTarget(targetChange, targetId => {\n const targetState = this.ensureTargetState(targetId);\n switch (targetChange.state) {\n case WatchTargetChangeState.NoChange:\n if (this.isActiveTarget(targetId)) {\n targetState.updateResumeToken(targetChange.resumeToken);\n }\n break;\n case WatchTargetChangeState.Added:\n // We need to decrement the number of pending acks needed from watch\n // for this targetId.\n targetState.recordTargetResponse();\n if (!targetState.isPending) {\n // We have a freshly added target, so we need to reset any state\n // that we had previously. This can happen e.g. when remove and add\n // back a target for existence filter mismatches.\n targetState.clearPendingChanges();\n }\n targetState.updateResumeToken(targetChange.resumeToken);\n break;\n case WatchTargetChangeState.Removed:\n // We need to keep track of removed targets to we can post-filter and\n // remove any target changes.\n // We need to decrement the number of pending acks needed from watch\n // for this targetId.\n targetState.recordTargetResponse();\n if (!targetState.isPending) {\n this.removeTarget(targetId);\n }\n debugAssert(\n !targetChange.cause,\n 'WatchChangeAggregator does not handle errored targets'\n );\n break;\n case WatchTargetChangeState.Current:\n if (this.isActiveTarget(targetId)) {\n targetState.markCurrent();\n targetState.updateResumeToken(targetChange.resumeToken);\n }\n break;\n case WatchTargetChangeState.Reset:\n if (this.isActiveTarget(targetId)) {\n // Reset the target and synthesizes removes for all existing\n // documents. 
The backend will re-add any documents that still\n // match the target before it sends the next global snapshot.\n this.resetTarget(targetId);\n targetState.updateResumeToken(targetChange.resumeToken);\n }\n break;\n default:\n fail('Unknown target watch change state: ' + targetChange.state);\n }\n });\n }\n\n /**\n * Iterates over all targetIds that the watch change applies to: either the\n * targetIds explicitly listed in the change or the targetIds of all currently\n * active targets.\n */\n forEachTarget(\n targetChange: WatchTargetChange,\n fn: (targetId: TargetId) => void\n ): void {\n if (targetChange.targetIds.length > 0) {\n targetChange.targetIds.forEach(fn);\n } else {\n this.targetStates.forEach((_, targetId) => {\n if (this.isActiveTarget(targetId)) {\n fn(targetId);\n }\n });\n }\n }\n\n /**\n * Handles existence filters and synthesizes deletes for filter mismatches.\n * Targets that are invalidated by filter mismatches are added to\n * `pendingTargetResets`.\n */\n handleExistenceFilter(watchChange: ExistenceFilterChange): void {\n const targetId = watchChange.targetId;\n const expectedCount = watchChange.existenceFilter.count;\n\n const targetData = this.targetDataForActiveTarget(targetId);\n if (targetData) {\n const target = targetData.target;\n if (target.isDocumentQuery()) {\n if (expectedCount === 0) {\n // The existence filter told us the document does not exist. We deduce\n // that this document does not exist and apply a deleted document to\n // our updates. Without applying this deleted document there might be\n // another query that will raise this document as part of a snapshot\n // until it is resolved, essentially exposing inconsistency between\n // queries.\n const key = new DocumentKey(target.path);\n this.removeDocumentFromTarget(\n targetId,\n key,\n new NoDocument(key, SnapshotVersion.min())\n );\n } else {\n hardAssert(\n expectedCount === 1,\n 'Single document existence filter with count: ' + expectedCount\n );\n }\n } else {\n const currentSize = this.getCurrentDocumentCountForTarget(targetId);\n if (currentSize !== expectedCount) {\n // Existence filter mismatch: We reset the mapping and raise a new\n // snapshot with `isFromCache:true`.\n this.resetTarget(targetId);\n this.pendingTargetResets = this.pendingTargetResets.add(targetId);\n }\n }\n }\n }\n\n /**\n * Converts the currently accumulated state into a remote event at the\n * provided snapshot version. Resets the accumulated changes before returning.\n */\n createRemoteEvent(snapshotVersion: SnapshotVersion): RemoteEvent {\n const targetChanges = new Map<TargetId, TargetChange>();\n\n this.targetStates.forEach((targetState, targetId) => {\n const targetData = this.targetDataForActiveTarget(targetId);\n if (targetData) {\n if (targetState.current && targetData.target.isDocumentQuery()) {\n // Document queries for document that don't exist can produce an empty\n // result set. To update our local cache, we synthesize a document\n // delete if we have not previously received the document. 
This\n // resolves the limbo state of the document, removing it from\n // limboDocumentRefs.\n //\n // TODO(dimond): Ideally we would have an explicit lookup target\n // instead resulting in an explicit delete message and we could\n // remove this special logic.\n const key = new DocumentKey(targetData.target.path);\n if (\n this.pendingDocumentUpdates.get(key) === null &&\n !this.targetContainsDocument(targetId, key)\n ) {\n this.removeDocumentFromTarget(\n targetId,\n key,\n new NoDocument(key, snapshotVersion)\n );\n }\n }\n\n if (targetState.hasPendingChanges) {\n targetChanges.set(targetId, targetState.toTargetChange());\n targetState.clearPendingChanges();\n }\n }\n });\n\n let resolvedLimboDocuments = documentKeySet();\n\n // We extract the set of limbo-only document updates as the GC logic\n // special-cases documents that do not appear in the target cache.\n //\n // TODO(gsoltis): Expand on this comment once GC is available in the JS\n // client.\n this.pendingDocumentTargetMapping.forEach((key, targets) => {\n let isOnlyLimboTarget = true;\n\n targets.forEachWhile(targetId => {\n const targetData = this.targetDataForActiveTarget(targetId);\n if (\n targetData &&\n targetData.purpose !== TargetPurpose.LimboResolution\n ) {\n isOnlyLimboTarget = false;\n return false;\n }\n\n return true;\n });\n\n if (isOnlyLimboTarget) {\n resolvedLimboDocuments = resolvedLimboDocuments.add(key);\n }\n });\n\n const remoteEvent = new RemoteEvent(\n snapshotVersion,\n targetChanges,\n this.pendingTargetResets,\n this.pendingDocumentUpdates,\n resolvedLimboDocuments\n );\n\n this.pendingDocumentUpdates = maybeDocumentMap();\n this.pendingDocumentTargetMapping = documentTargetMap();\n this.pendingTargetResets = new SortedSet<TargetId>(primitiveComparator);\n\n return remoteEvent;\n }\n\n /**\n * Adds the provided document to the internal list of document updates and\n * its document key to the given target's mapping.\n */\n // Visible for testing.\n addDocumentToTarget(targetId: TargetId, document: MaybeDocument): void {\n if (!this.isActiveTarget(targetId)) {\n return;\n }\n\n const changeType = this.targetContainsDocument(targetId, document.key)\n ? ChangeType.Modified\n : ChangeType.Added;\n\n const targetState = this.ensureTargetState(targetId);\n targetState.addDocumentChange(document.key, changeType);\n\n this.pendingDocumentUpdates = this.pendingDocumentUpdates.insert(\n document.key,\n document\n );\n\n this.pendingDocumentTargetMapping = this.pendingDocumentTargetMapping.insert(\n document.key,\n this.ensureDocumentTargetMapping(document.key).add(targetId)\n );\n }\n\n /**\n * Removes the provided document from the target mapping. If the\n * document no longer matches the target, but the document's state is still\n * known (e.g. 
we know that the document was deleted or we received the change\n * that caused the filter mismatch), the new document can be provided\n * to update the remote document cache.\n */\n // Visible for testing.\n removeDocumentFromTarget(\n targetId: TargetId,\n key: DocumentKey,\n updatedDocument: MaybeDocument | null\n ): void {\n if (!this.isActiveTarget(targetId)) {\n return;\n }\n\n const targetState = this.ensureTargetState(targetId);\n if (this.targetContainsDocument(targetId, key)) {\n targetState.addDocumentChange(key, ChangeType.Removed);\n } else {\n // The document may have entered and left the target before we raised a\n // snapshot, so we can just ignore the change.\n targetState.removeDocumentChange(key);\n }\n\n this.pendingDocumentTargetMapping = this.pendingDocumentTargetMapping.insert(\n key,\n this.ensureDocumentTargetMapping(key).delete(targetId)\n );\n\n if (updatedDocument) {\n this.pendingDocumentUpdates = this.pendingDocumentUpdates.insert(\n key,\n updatedDocument\n );\n }\n }\n\n removeTarget(targetId: TargetId): void {\n this.targetStates.delete(targetId);\n }\n\n /**\n * Returns the current count of documents in the target. This includes both\n * the number of documents that the LocalStore considers to be part of the\n * target as well as any accumulated changes.\n */\n private getCurrentDocumentCountForTarget(targetId: TargetId): number {\n const targetState = this.ensureTargetState(targetId);\n const targetChange = targetState.toTargetChange();\n return (\n this.metadataProvider.getRemoteKeysForTarget(targetId).size +\n targetChange.addedDocuments.size -\n targetChange.removedDocuments.size\n );\n }\n\n /**\n * Increment the number of acks needed from watch before we can consider the\n * server to be 'in-sync' with the client's active targets.\n */\n recordPendingTargetRequest(targetId: TargetId): void {\n // For each request we get we need to record we need a response for it.\n const targetState = this.ensureTargetState(targetId);\n targetState.recordPendingTargetRequest();\n }\n\n private ensureTargetState(targetId: TargetId): TargetState {\n let result = this.targetStates.get(targetId);\n if (!result) {\n result = new TargetState();\n this.targetStates.set(targetId, result);\n }\n return result;\n }\n\n private ensureDocumentTargetMapping(key: DocumentKey): SortedSet<TargetId> {\n let targetMapping = this.pendingDocumentTargetMapping.get(key);\n\n if (!targetMapping) {\n targetMapping = new SortedSet<TargetId>(primitiveComparator);\n this.pendingDocumentTargetMapping = this.pendingDocumentTargetMapping.insert(\n key,\n targetMapping\n );\n }\n\n return targetMapping;\n }\n\n /**\n * Verifies that the user is still interested in this target (by calling\n * `getTargetDataForTarget()`) and that we are not waiting for pending ADDs\n * from watch.\n */\n protected isActiveTarget(targetId: TargetId): boolean {\n const targetActive = this.targetDataForActiveTarget(targetId) !== null;\n if (!targetActive) {\n logDebug(LOG_TAG, 'Detected inactive target', targetId);\n }\n return targetActive;\n }\n\n /**\n * Returns the TargetData for an active target (i.e. a target that the user\n * is still interested in that has no outstanding target change requests).\n */\n protected targetDataForActiveTarget(targetId: TargetId): TargetData | null {\n const targetState = this.targetStates.get(targetId);\n return targetState && targetState.isPending\n ? 
null\n : this.metadataProvider.getTargetDataForTarget(targetId);\n }\n\n /**\n * Resets the state of a Watch target to its initial state (e.g. sets\n * 'current' to false, clears the resume token and removes its target mapping\n * from all documents).\n */\n private resetTarget(targetId: TargetId): void {\n debugAssert(\n !this.targetStates.get(targetId)!.isPending,\n 'Should only reset active targets'\n );\n this.targetStates.set(targetId, new TargetState());\n\n // Trigger removal for any documents currently mapped to this target.\n // These removals will be part of the initial snapshot if Watch does not\n // resend these documents.\n const existingKeys = this.metadataProvider.getRemoteKeysForTarget(targetId);\n existingKeys.forEach(key => {\n this.removeDocumentFromTarget(targetId, key, /*updatedDocument=*/ null);\n });\n }\n /**\n * Returns whether the LocalStore considers the document to be part of the\n * specified target.\n */\n private targetContainsDocument(\n targetId: TargetId,\n key: DocumentKey\n ): boolean {\n const existingKeys = this.metadataProvider.getRemoteKeysForTarget(targetId);\n return existingKeys.has(key);\n }\n}\n\nfunction documentTargetMap(): SortedMap<DocumentKey, SortedSet<TargetId>> {\n return new SortedMap<DocumentKey, SortedSet<TargetId>>(\n DocumentKey.comparator\n );\n}\n\nfunction snapshotChangesMap(): SortedMap<DocumentKey, ChangeType> {\n return new SortedMap<DocumentKey, ChangeType>(DocumentKey.comparator);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Blob } from '../api/blob';\nimport { Timestamp } from '../api/timestamp';\nimport { DatabaseId } from '../core/database_info';\nimport {\n Bound,\n Direction,\n FieldFilter,\n Filter,\n LimitType,\n Operator,\n OrderBy,\n Query\n} from '../core/query';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { Target } from '../core/target';\nimport { TargetId } from '../core/types';\nimport { TargetData, TargetPurpose } from '../local/target_data';\nimport { Document, MaybeDocument, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { ObjectValue } from '../model/object_value';\nimport {\n DeleteMutation,\n FieldMask,\n FieldTransform,\n Mutation,\n MutationResult,\n PatchMutation,\n Precondition,\n SetMutation,\n TransformMutation,\n VerifyMutation\n} from '../model/mutation';\nimport { FieldPath, ResourcePath } from '../model/path';\nimport * as api from '../protos/firestore_proto_api';\nimport { debugAssert, fail, hardAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { ByteString } from '../util/byte_string';\nimport {\n isNegativeZero,\n isNullOrUndefined,\n isSafeInteger\n} from '../util/types';\nimport {\n ArrayRemoveTransformOperation,\n ArrayUnionTransformOperation,\n NumericIncrementTransformOperation,\n ServerTimestampTransform,\n TransformOperation\n} from '../model/transform_operation';\nimport { 
ExistenceFilter } from './existence_filter';\nimport { mapCodeFromRpcCode } from './rpc_error';\nimport {\n DocumentWatchChange,\n ExistenceFilterChange,\n WatchChange,\n WatchTargetChange,\n WatchTargetChangeState\n} from './watch_change';\nimport { isNanValue, isNullValue, normalizeTimestamp } from '../model/values';\n\nconst DIRECTIONS = (() => {\n const dirs: { [dir: string]: api.OrderDirection } = {};\n dirs[Direction.ASCENDING] = 'ASCENDING';\n dirs[Direction.DESCENDING] = 'DESCENDING';\n return dirs;\n})();\n\nconst OPERATORS = (() => {\n const ops: { [op: string]: api.FieldFilterOp } = {};\n ops[Operator.LESS_THAN] = 'LESS_THAN';\n ops[Operator.LESS_THAN_OR_EQUAL] = 'LESS_THAN_OR_EQUAL';\n ops[Operator.GREATER_THAN] = 'GREATER_THAN';\n ops[Operator.GREATER_THAN_OR_EQUAL] = 'GREATER_THAN_OR_EQUAL';\n ops[Operator.EQUAL] = 'EQUAL';\n ops[Operator.ARRAY_CONTAINS] = 'ARRAY_CONTAINS';\n ops[Operator.IN] = 'IN';\n ops[Operator.ARRAY_CONTAINS_ANY] = 'ARRAY_CONTAINS_ANY';\n return ops;\n})();\n\nfunction assertPresent(value: unknown, description: string): asserts value {\n debugAssert(!isNullOrUndefined(value), description + ' is missing');\n}\n\nexport interface SerializerOptions {\n /**\n * The serializer supports both Protobuf.js and Proto3 JSON formats. By\n * setting this flag to true, the serializer will use the Proto3 JSON format.\n *\n * For a description of the Proto3 JSON format check\n * https://developers.google.com/protocol-buffers/docs/proto3#json\n */\n useProto3Json: boolean;\n}\n\n/**\n * Generates JsonObject values for the Datastore API suitable for sending to\n * either GRPC stub methods or via the JSON/HTTP REST API.\n * TODO(klimt): We can remove the databaseId argument if we keep the full\n * resource name in documents.\n */\nexport class JsonProtoSerializer {\n constructor(\n private databaseId: DatabaseId,\n private options: SerializerOptions\n ) {}\n\n fromRpcStatus(status: api.Status): FirestoreError {\n const code =\n status.code === undefined\n ? Code.UNKNOWN\n : mapCodeFromRpcCode(status.code);\n return new FirestoreError(code, status.message || '');\n }\n\n /**\n * Returns a value for a number (or null) that's appropriate to put into\n * a google.protobuf.Int32Value proto.\n * DO NOT USE THIS FOR ANYTHING ELSE.\n * This method cheats. It's typed as returning \"number\" because that's what\n * our generated proto interfaces say Int32Value must be. But GRPC actually\n * expects a { value: <number> } struct.\n */\n private toInt32Proto(val: number | null): number | { value: number } | null {\n if (this.options.useProto3Json || isNullOrUndefined(val)) {\n return val;\n } else {\n return { value: val };\n }\n }\n\n /**\n * Returns a number (or null) from a google.protobuf.Int32Value proto.\n */\n private fromInt32Proto(\n val: number | { value: number } | undefined\n ): number | null {\n let result;\n if (typeof val === 'object') {\n result = val.value;\n } else {\n result = val;\n }\n return isNullOrUndefined(result) ? 
null : result;\n }\n\n /**\n * Returns an IntegerValue for `value`.\n */\n toInteger(value: number): api.Value {\n return { integerValue: '' + value };\n }\n\n /**\n * Returns an DoubleValue for `value` that is encoded based the serializer's\n * `useProto3Json` setting.\n */\n toDouble(value: number): api.Value {\n if (this.options.useProto3Json) {\n if (isNaN(value)) {\n return { doubleValue: 'NaN' };\n } else if (value === Infinity) {\n return { doubleValue: 'Infinity' };\n } else if (value === -Infinity) {\n return { doubleValue: '-Infinity' };\n }\n }\n return { doubleValue: isNegativeZero(value) ? '-0' : value };\n }\n\n /**\n * Returns a value for a number that's appropriate to put into a proto.\n * The return value is an IntegerValue if it can safely represent the value,\n * otherwise a DoubleValue is returned.\n */\n toNumber(value: number): api.Value {\n return isSafeInteger(value) ? this.toInteger(value) : this.toDouble(value);\n }\n\n /**\n * Returns a value for a Date that's appropriate to put into a proto.\n */\n toTimestamp(timestamp: Timestamp): api.Timestamp {\n if (this.options.useProto3Json) {\n // Serialize to ISO-8601 date format, but with full nano resolution.\n // Since JS Date has only millis, let's only use it for the seconds and\n // then manually add the fractions to the end.\n const jsDateStr = new Date(timestamp.seconds * 1000).toISOString();\n // Remove .xxx frac part and Z in the end.\n const strUntilSeconds = jsDateStr.replace(/\\.\\d*/, '').replace('Z', '');\n // Pad the fraction out to 9 digits (nanos).\n const nanoStr = ('000000000' + timestamp.nanoseconds).slice(-9);\n\n return `${strUntilSeconds}.${nanoStr}Z`;\n } else {\n return {\n seconds: '' + timestamp.seconds,\n nanos: timestamp.nanoseconds\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } as any;\n }\n }\n\n private fromTimestamp(date: api.Timestamp): Timestamp {\n const timestamp = normalizeTimestamp(date);\n return new Timestamp(timestamp.seconds, timestamp.nanos);\n }\n\n /**\n * Returns a value for bytes that's appropriate to put in a proto.\n *\n * Visible for testing.\n */\n toBytes(bytes: Blob | ByteString): string | Uint8Array {\n if (this.options.useProto3Json) {\n return bytes.toBase64();\n } else {\n return bytes.toUint8Array();\n }\n }\n\n /**\n * Returns a ByteString based on the proto string value.\n */\n fromBytes(value: string | Uint8Array | undefined): ByteString {\n if (this.options.useProto3Json) {\n hardAssert(\n value === undefined || typeof value === 'string',\n 'value must be undefined or a string when using proto3 Json'\n );\n return ByteString.fromBase64String(value ? value : '');\n } else {\n hardAssert(\n value === undefined || value instanceof Uint8Array,\n 'value must be undefined or Uint8Array'\n );\n return ByteString.fromUint8Array(value ? 
value : new Uint8Array());\n }\n }\n\n toVersion(version: SnapshotVersion): api.Timestamp {\n return this.toTimestamp(version.toTimestamp());\n }\n\n fromVersion(version: api.Timestamp): SnapshotVersion {\n hardAssert(!!version, \"Trying to deserialize version that isn't set\");\n return SnapshotVersion.fromTimestamp(this.fromTimestamp(version));\n }\n\n toResourceName(path: ResourcePath, databaseId?: DatabaseId): string {\n return this.fullyQualifiedPrefixPath(databaseId || this.databaseId)\n .child('documents')\n .child(path)\n .canonicalString();\n }\n\n fromResourceName(name: string): ResourcePath {\n const resource = ResourcePath.fromString(name);\n hardAssert(\n isValidResourceName(resource),\n 'Tried to deserialize invalid key ' + resource.toString()\n );\n return resource;\n }\n\n toName(key: DocumentKey): string {\n return this.toResourceName(key.path);\n }\n\n fromName(name: string): DocumentKey {\n const resource = this.fromResourceName(name);\n hardAssert(\n resource.get(1) === this.databaseId.projectId,\n 'Tried to deserialize key from different project: ' +\n resource.get(1) +\n ' vs ' +\n this.databaseId.projectId\n );\n hardAssert(\n (!resource.get(3) && !this.databaseId.database) ||\n resource.get(3) === this.databaseId.database,\n 'Tried to deserialize key from different database: ' +\n resource.get(3) +\n ' vs ' +\n this.databaseId.database\n );\n return new DocumentKey(this.extractLocalPathFromResourceName(resource));\n }\n\n toQueryPath(path: ResourcePath): string {\n return this.toResourceName(path);\n }\n\n fromQueryPath(name: string): ResourcePath {\n const resourceName = this.fromResourceName(name);\n // In v1beta1 queries for collections at the root did not have a trailing\n // \"/documents\". In v1 all resource paths contain \"/documents\". 
Preserve the\n // ability to read the v1beta1 form for compatibility with queries persisted\n // in the local target cache.\n if (resourceName.length === 4) {\n return ResourcePath.EMPTY_PATH;\n }\n return this.extractLocalPathFromResourceName(resourceName);\n }\n\n get encodedDatabaseId(): string {\n const path = new ResourcePath([\n 'projects',\n this.databaseId.projectId,\n 'databases',\n this.databaseId.database\n ]);\n return path.canonicalString();\n }\n\n private fullyQualifiedPrefixPath(databaseId: DatabaseId): ResourcePath {\n return new ResourcePath([\n 'projects',\n databaseId.projectId,\n 'databases',\n databaseId.database\n ]);\n }\n\n private extractLocalPathFromResourceName(\n resourceName: ResourcePath\n ): ResourcePath {\n hardAssert(\n resourceName.length > 4 && resourceName.get(4) === 'documents',\n 'tried to deserialize invalid key ' + resourceName.toString()\n );\n return resourceName.popFirst(5);\n }\n\n /** Creates an api.Document from key and fields (but no create/update time) */\n toMutationDocument(key: DocumentKey, fields: ObjectValue): api.Document {\n return {\n name: this.toName(key),\n fields: fields.proto.mapValue.fields\n };\n }\n\n toDocument(document: Document): api.Document {\n debugAssert(\n !document.hasLocalMutations,\n \"Can't serialize documents with mutations.\"\n );\n return {\n name: this.toName(document.key),\n fields: document.toProto().mapValue.fields,\n updateTime: this.toTimestamp(document.version.toTimestamp())\n };\n }\n\n fromDocument(\n document: api.Document,\n hasCommittedMutations?: boolean\n ): Document {\n const key = this.fromName(document.name!);\n const version = this.fromVersion(document.updateTime!);\n const data = new ObjectValue({ mapValue: { fields: document.fields } });\n return new Document(key, version, data, {\n hasCommittedMutations: !!hasCommittedMutations\n });\n }\n\n private fromFound(doc: api.BatchGetDocumentsResponse): Document {\n hardAssert(\n !!doc.found,\n 'Tried to deserialize a found document from a missing document.'\n );\n assertPresent(doc.found.name, 'doc.found.name');\n assertPresent(doc.found.updateTime, 'doc.found.updateTime');\n const key = this.fromName(doc.found.name);\n const version = this.fromVersion(doc.found.updateTime);\n const data = new ObjectValue({ mapValue: { fields: doc.found.fields } });\n return new Document(key, version, data, {});\n }\n\n private fromMissing(result: api.BatchGetDocumentsResponse): NoDocument {\n hardAssert(\n !!result.missing,\n 'Tried to deserialize a missing document from a found document.'\n );\n hardAssert(\n !!result.readTime,\n 'Tried to deserialize a missing document without a read time.'\n );\n const key = this.fromName(result.missing);\n const version = this.fromVersion(result.readTime);\n return new NoDocument(key, version);\n }\n\n fromMaybeDocument(result: api.BatchGetDocumentsResponse): MaybeDocument {\n if ('found' in result) {\n return this.fromFound(result);\n } else if ('missing' in result) {\n return this.fromMissing(result);\n }\n return fail('invalid batch get response: ' + JSON.stringify(result));\n }\n\n fromWatchChange(change: api.ListenResponse): WatchChange {\n let watchChange: WatchChange;\n if ('targetChange' in change) {\n assertPresent(change.targetChange, 'targetChange');\n // proto3 default value is unset in JSON (undefined), so use 'NO_CHANGE'\n // if unset\n const state = this.fromWatchTargetChangeState(\n change.targetChange.targetChangeType || 'NO_CHANGE'\n );\n const targetIds: TargetId[] = change.targetChange.targetIds || [];\n\n 
const resumeToken = this.fromBytes(change.targetChange.resumeToken);\n const causeProto = change.targetChange!.cause;\n const cause = causeProto && this.fromRpcStatus(causeProto);\n watchChange = new WatchTargetChange(\n state,\n targetIds,\n resumeToken,\n cause || null\n );\n } else if ('documentChange' in change) {\n assertPresent(change.documentChange, 'documentChange');\n const entityChange = change.documentChange;\n assertPresent(entityChange.document, 'documentChange.name');\n assertPresent(entityChange.document.name, 'documentChange.document.name');\n assertPresent(\n entityChange.document.updateTime,\n 'documentChange.document.updateTime'\n );\n const key = this.fromName(entityChange.document.name);\n const version = this.fromVersion(entityChange.document.updateTime);\n const data = new ObjectValue({\n mapValue: { fields: entityChange.document.fields }\n });\n const doc = new Document(key, version, data, {});\n const updatedTargetIds = entityChange.targetIds || [];\n const removedTargetIds = entityChange.removedTargetIds || [];\n watchChange = new DocumentWatchChange(\n updatedTargetIds,\n removedTargetIds,\n doc.key,\n doc\n );\n } else if ('documentDelete' in change) {\n assertPresent(change.documentDelete, 'documentDelete');\n const docDelete = change.documentDelete;\n assertPresent(docDelete.document, 'documentDelete.document');\n const key = this.fromName(docDelete.document);\n const version = docDelete.readTime\n ? this.fromVersion(docDelete.readTime)\n : SnapshotVersion.min();\n const doc = new NoDocument(key, version);\n const removedTargetIds = docDelete.removedTargetIds || [];\n watchChange = new DocumentWatchChange([], removedTargetIds, doc.key, doc);\n } else if ('documentRemove' in change) {\n assertPresent(change.documentRemove, 'documentRemove');\n const docRemove = change.documentRemove;\n assertPresent(docRemove.document, 'documentRemove');\n const key = this.fromName(docRemove.document);\n const removedTargetIds = docRemove.removedTargetIds || [];\n watchChange = new DocumentWatchChange([], removedTargetIds, key, null);\n } else if ('filter' in change) {\n // TODO(dimond): implement existence filter parsing with strategy.\n assertPresent(change.filter, 'filter');\n const filter = change.filter;\n assertPresent(filter.targetId, 'filter.targetId');\n const count = filter.count || 0;\n const existenceFilter = new ExistenceFilter(count);\n const targetId = filter.targetId;\n watchChange = new ExistenceFilterChange(targetId, existenceFilter);\n } else {\n return fail('Unknown change type ' + JSON.stringify(change));\n }\n return watchChange;\n }\n\n fromWatchTargetChangeState(\n state: api.TargetChangeTargetChangeType\n ): WatchTargetChangeState {\n if (state === 'NO_CHANGE') {\n return WatchTargetChangeState.NoChange;\n } else if (state === 'ADD') {\n return WatchTargetChangeState.Added;\n } else if (state === 'REMOVE') {\n return WatchTargetChangeState.Removed;\n } else if (state === 'CURRENT') {\n return WatchTargetChangeState.Current;\n } else if (state === 'RESET') {\n return WatchTargetChangeState.Reset;\n } else {\n return fail('Got unexpected TargetChange.state: ' + state);\n }\n }\n\n versionFromListenResponse(change: api.ListenResponse): SnapshotVersion {\n // We have only reached a consistent snapshot for the entire stream if there\n // is a read_time set and it applies to all targets (i.e. the list of\n // targets is empty). 
The backend is guaranteed to send such responses.\n if (!('targetChange' in change)) {\n return SnapshotVersion.min();\n }\n const targetChange = change.targetChange!;\n if (targetChange.targetIds && targetChange.targetIds.length) {\n return SnapshotVersion.min();\n }\n if (!targetChange.readTime) {\n return SnapshotVersion.min();\n }\n return this.fromVersion(targetChange.readTime);\n }\n\n toMutation(mutation: Mutation): api.Write {\n let result: api.Write;\n if (mutation instanceof SetMutation) {\n result = {\n update: this.toMutationDocument(mutation.key, mutation.value)\n };\n } else if (mutation instanceof DeleteMutation) {\n result = { delete: this.toName(mutation.key) };\n } else if (mutation instanceof PatchMutation) {\n result = {\n update: this.toMutationDocument(mutation.key, mutation.data),\n updateMask: this.toDocumentMask(mutation.fieldMask)\n };\n } else if (mutation instanceof TransformMutation) {\n result = {\n transform: {\n document: this.toName(mutation.key),\n fieldTransforms: mutation.fieldTransforms.map(transform =>\n this.toFieldTransform(transform)\n )\n }\n };\n } else if (mutation instanceof VerifyMutation) {\n result = {\n verify: this.toName(mutation.key)\n };\n } else {\n return fail('Unknown mutation type ' + mutation.type);\n }\n\n if (!mutation.precondition.isNone) {\n result.currentDocument = this.toPrecondition(mutation.precondition);\n }\n\n return result;\n }\n\n fromMutation(proto: api.Write): Mutation {\n const precondition = proto.currentDocument\n ? this.fromPrecondition(proto.currentDocument)\n : Precondition.none();\n\n if (proto.update) {\n assertPresent(proto.update.name, 'name');\n const key = this.fromName(proto.update.name);\n const value = new ObjectValue({\n mapValue: { fields: proto.update.fields }\n });\n if (proto.updateMask) {\n const fieldMask = this.fromDocumentMask(proto.updateMask);\n return new PatchMutation(key, value, fieldMask, precondition);\n } else {\n return new SetMutation(key, value, precondition);\n }\n } else if (proto.delete) {\n const key = this.fromName(proto.delete);\n return new DeleteMutation(key, precondition);\n } else if (proto.transform) {\n const key = this.fromName(proto.transform.document!);\n const fieldTransforms = proto.transform.fieldTransforms!.map(transform =>\n this.fromFieldTransform(transform)\n );\n hardAssert(\n precondition.exists === true,\n 'Transforms only support precondition \"exists == true\"'\n );\n return new TransformMutation(key, fieldTransforms);\n } else if (proto.verify) {\n const key = this.fromName(proto.verify);\n return new VerifyMutation(key, precondition);\n } else {\n return fail('unknown mutation proto: ' + JSON.stringify(proto));\n }\n }\n\n private toPrecondition(precondition: Precondition): api.Precondition {\n debugAssert(!precondition.isNone, \"Can't serialize an empty precondition\");\n if (precondition.updateTime !== undefined) {\n return {\n updateTime: this.toVersion(precondition.updateTime)\n };\n } else if (precondition.exists !== undefined) {\n return { exists: precondition.exists };\n } else {\n return fail('Unknown precondition');\n }\n }\n\n private fromPrecondition(precondition: api.Precondition): Precondition {\n if (precondition.updateTime !== undefined) {\n return Precondition.updateTime(this.fromVersion(precondition.updateTime));\n } else if (precondition.exists !== undefined) {\n return Precondition.exists(precondition.exists);\n } else {\n return Precondition.none();\n }\n }\n\n private fromWriteResult(\n proto: api.WriteResult,\n commitTime: 
api.Timestamp\n ): MutationResult {\n // NOTE: Deletes don't have an updateTime.\n let version = proto.updateTime\n ? this.fromVersion(proto.updateTime)\n : this.fromVersion(commitTime);\n\n if (version.isEqual(SnapshotVersion.min())) {\n // The Firestore Emulator currently returns an update time of 0 for\n // deletes of non-existing documents (rather than null). This breaks the\n // test \"get deleted doc while offline with source=cache\" as NoDocuments\n // with version 0 are filtered by IndexedDb's RemoteDocumentCache.\n // TODO(#2149): Remove this when Emulator is fixed\n version = this.fromVersion(commitTime);\n }\n\n let transformResults: api.Value[] | null = null;\n if (proto.transformResults && proto.transformResults.length > 0) {\n transformResults = proto.transformResults;\n }\n return new MutationResult(version, transformResults);\n }\n\n fromWriteResults(\n protos: api.WriteResult[] | undefined,\n commitTime?: api.Timestamp\n ): MutationResult[] {\n if (protos && protos.length > 0) {\n hardAssert(\n commitTime !== undefined,\n 'Received a write result without a commit time'\n );\n return protos.map(proto => this.fromWriteResult(proto, commitTime));\n } else {\n return [];\n }\n }\n\n private toFieldTransform(fieldTransform: FieldTransform): api.FieldTransform {\n const transform = fieldTransform.transform;\n if (transform instanceof ServerTimestampTransform) {\n return {\n fieldPath: fieldTransform.field.canonicalString(),\n setToServerValue: 'REQUEST_TIME'\n };\n } else if (transform instanceof ArrayUnionTransformOperation) {\n return {\n fieldPath: fieldTransform.field.canonicalString(),\n appendMissingElements: {\n values: transform.elements\n }\n };\n } else if (transform instanceof ArrayRemoveTransformOperation) {\n return {\n fieldPath: fieldTransform.field.canonicalString(),\n removeAllFromArray: {\n values: transform.elements\n }\n };\n } else if (transform instanceof NumericIncrementTransformOperation) {\n return {\n fieldPath: fieldTransform.field.canonicalString(),\n increment: transform.operand\n };\n } else {\n throw fail('Unknown transform: ' + fieldTransform.transform);\n }\n }\n\n private fromFieldTransform(proto: api.FieldTransform): FieldTransform {\n let transform: TransformOperation | null = null;\n if ('setToServerValue' in proto) {\n hardAssert(\n proto.setToServerValue === 'REQUEST_TIME',\n 'Unknown server value transform proto: ' + JSON.stringify(proto)\n );\n transform = ServerTimestampTransform.instance;\n } else if ('appendMissingElements' in proto) {\n const values = proto.appendMissingElements!.values || [];\n transform = new ArrayUnionTransformOperation(values);\n } else if ('removeAllFromArray' in proto) {\n const values = proto.removeAllFromArray!.values || [];\n transform = new ArrayRemoveTransformOperation(values);\n } else if ('increment' in proto) {\n transform = new NumericIncrementTransformOperation(\n this,\n proto.increment!\n );\n } else {\n fail('Unknown transform proto: ' + JSON.stringify(proto));\n }\n const fieldPath = FieldPath.fromServerFormat(proto.fieldPath!);\n return new FieldTransform(fieldPath, transform!);\n }\n\n toDocumentsTarget(target: Target): api.DocumentsTarget {\n return { documents: [this.toQueryPath(target.path)] };\n }\n\n fromDocumentsTarget(documentsTarget: api.DocumentsTarget): Target {\n const count = documentsTarget.documents!.length;\n hardAssert(\n count === 1,\n 'DocumentsTarget contained other than 1 document: ' + count\n );\n const name = documentsTarget.documents![0];\n return 
Query.atPath(this.fromQueryPath(name)).toTarget();\n }\n\n toQueryTarget(target: Target): api.QueryTarget {\n // Dissect the path into parent, collectionId, and optional key filter.\n const result: api.QueryTarget = { structuredQuery: {} };\n const path = target.path;\n if (target.collectionGroup !== null) {\n debugAssert(\n path.length % 2 === 0,\n 'Collection Group queries should be within a document path or root.'\n );\n result.parent = this.toQueryPath(path);\n result.structuredQuery!.from = [\n {\n collectionId: target.collectionGroup,\n allDescendants: true\n }\n ];\n } else {\n debugAssert(\n path.length % 2 !== 0,\n 'Document queries with filters are not supported.'\n );\n result.parent = this.toQueryPath(path.popLast());\n result.structuredQuery!.from = [{ collectionId: path.lastSegment() }];\n }\n\n const where = this.toFilter(target.filters);\n if (where) {\n result.structuredQuery!.where = where;\n }\n\n const orderBy = this.toOrder(target.orderBy);\n if (orderBy) {\n result.structuredQuery!.orderBy = orderBy;\n }\n\n const limit = this.toInt32Proto(target.limit);\n if (limit !== null) {\n result.structuredQuery!.limit = limit;\n }\n\n if (target.startAt) {\n result.structuredQuery!.startAt = this.toCursor(target.startAt);\n }\n if (target.endAt) {\n result.structuredQuery!.endAt = this.toCursor(target.endAt);\n }\n\n return result;\n }\n\n fromQueryTarget(target: api.QueryTarget): Target {\n let path = this.fromQueryPath(target.parent!);\n\n const query = target.structuredQuery!;\n const fromCount = query.from ? query.from.length : 0;\n let collectionGroup: string | null = null;\n if (fromCount > 0) {\n hardAssert(\n fromCount === 1,\n 'StructuredQuery.from with more than one collection is not supported.'\n );\n const from = query.from![0];\n if (from.allDescendants) {\n collectionGroup = from.collectionId!;\n } else {\n path = path.child(from.collectionId!);\n }\n }\n\n let filterBy: Filter[] = [];\n if (query.where) {\n filterBy = this.fromFilter(query.where);\n }\n\n let orderBy: OrderBy[] = [];\n if (query.orderBy) {\n orderBy = this.fromOrder(query.orderBy);\n }\n\n let limit: number | null = null;\n if (query.limit) {\n limit = this.fromInt32Proto(query.limit);\n }\n\n let startAt: Bound | null = null;\n if (query.startAt) {\n startAt = this.fromCursor(query.startAt);\n }\n\n let endAt: Bound | null = null;\n if (query.endAt) {\n endAt = this.fromCursor(query.endAt);\n }\n\n return new Query(\n path,\n collectionGroup,\n orderBy,\n filterBy,\n limit,\n LimitType.First,\n startAt,\n endAt\n ).toTarget();\n }\n\n toListenRequestLabels(\n targetData: TargetData\n ): api.ApiClientObjectMap<string> | null {\n const value = this.toLabel(targetData.purpose);\n if (value == null) {\n return null;\n } else {\n return {\n 'goog-listen-tags': value\n };\n }\n }\n\n private toLabel(purpose: TargetPurpose): string | null {\n switch (purpose) {\n case TargetPurpose.Listen:\n return null;\n case TargetPurpose.ExistenceFilterMismatch:\n return 'existence-filter-mismatch';\n case TargetPurpose.LimboResolution:\n return 'limbo-document';\n default:\n return fail('Unrecognized query purpose: ' + purpose);\n }\n }\n\n toTarget(targetData: TargetData): api.Target {\n let result: api.Target;\n const target = targetData.target;\n\n if (target.isDocumentQuery()) {\n result = { documents: this.toDocumentsTarget(target) };\n } else {\n result = { query: this.toQueryTarget(target) };\n }\n\n result.targetId = targetData.targetId;\n\n if (targetData.resumeToken.approximateByteSize() > 0) {\n 
result.resumeToken = this.toBytes(targetData.resumeToken);\n }\n\n return result;\n }\n\n private toFilter(filters: Filter[]): api.Filter | undefined {\n if (filters.length === 0) {\n return;\n }\n const protos = filters.map(filter => {\n if (filter instanceof FieldFilter) {\n return this.toUnaryOrFieldFilter(filter);\n } else {\n return fail('Unrecognized filter: ' + JSON.stringify(filter));\n }\n });\n if (protos.length === 1) {\n return protos[0];\n }\n return { compositeFilter: { op: 'AND', filters: protos } };\n }\n\n private fromFilter(filter: api.Filter | undefined): Filter[] {\n if (!filter) {\n return [];\n } else if (filter.unaryFilter !== undefined) {\n return [this.fromUnaryFilter(filter)];\n } else if (filter.fieldFilter !== undefined) {\n return [this.fromFieldFilter(filter)];\n } else if (filter.compositeFilter !== undefined) {\n return filter.compositeFilter\n .filters!.map(f => this.fromFilter(f))\n .reduce((accum, current) => accum.concat(current));\n } else {\n return fail('Unknown filter: ' + JSON.stringify(filter));\n }\n }\n\n private toOrder(orderBys: OrderBy[]): api.Order[] | undefined {\n if (orderBys.length === 0) {\n return;\n }\n return orderBys.map(order => this.toPropertyOrder(order));\n }\n\n private fromOrder(orderBys: api.Order[]): OrderBy[] {\n return orderBys.map(order => this.fromPropertyOrder(order));\n }\n\n private toCursor(cursor: Bound): api.Cursor {\n return {\n before: cursor.before,\n values: cursor.position\n };\n }\n\n private fromCursor(cursor: api.Cursor): Bound {\n const before = !!cursor.before;\n const position = cursor.values || [];\n return new Bound(position, before);\n }\n\n // visible for testing\n toDirection(dir: Direction): api.OrderDirection {\n return DIRECTIONS[dir];\n }\n\n // visible for testing\n fromDirection(dir: api.OrderDirection | undefined): Direction | undefined {\n switch (dir) {\n case 'ASCENDING':\n return Direction.ASCENDING;\n case 'DESCENDING':\n return Direction.DESCENDING;\n default:\n return undefined;\n }\n }\n\n // visible for testing\n toOperatorName(op: Operator): api.FieldFilterOp {\n return OPERATORS[op];\n }\n\n fromOperatorName(op: api.FieldFilterOp): Operator {\n switch (op) {\n case 'EQUAL':\n return Operator.EQUAL;\n case 'GREATER_THAN':\n return Operator.GREATER_THAN;\n case 'GREATER_THAN_OR_EQUAL':\n return Operator.GREATER_THAN_OR_EQUAL;\n case 'LESS_THAN':\n return Operator.LESS_THAN;\n case 'LESS_THAN_OR_EQUAL':\n return Operator.LESS_THAN_OR_EQUAL;\n case 'ARRAY_CONTAINS':\n return Operator.ARRAY_CONTAINS;\n case 'IN':\n return Operator.IN;\n case 'ARRAY_CONTAINS_ANY':\n return Operator.ARRAY_CONTAINS_ANY;\n case 'OPERATOR_UNSPECIFIED':\n return fail('Unspecified operator');\n default:\n return fail('Unknown operator');\n }\n }\n\n toFieldPathReference(path: FieldPath): api.FieldReference {\n return { fieldPath: path.canonicalString() };\n }\n\n fromFieldPathReference(fieldReference: api.FieldReference): FieldPath {\n return FieldPath.fromServerFormat(fieldReference.fieldPath!);\n }\n\n // visible for testing\n toPropertyOrder(orderBy: OrderBy): api.Order {\n return {\n field: this.toFieldPathReference(orderBy.field),\n direction: this.toDirection(orderBy.dir)\n };\n }\n\n fromPropertyOrder(orderBy: api.Order): OrderBy {\n return new OrderBy(\n this.fromFieldPathReference(orderBy.field!),\n this.fromDirection(orderBy.direction)\n );\n }\n\n fromFieldFilter(filter: api.Filter): Filter {\n return FieldFilter.create(\n this.fromFieldPathReference(filter.fieldFilter!.field!),\n 
this.fromOperatorName(filter.fieldFilter!.op!),\n filter.fieldFilter!.value!\n );\n }\n\n // visible for testing\n toUnaryOrFieldFilter(filter: FieldFilter): api.Filter {\n if (filter.op === Operator.EQUAL) {\n if (isNanValue(filter.value)) {\n return {\n unaryFilter: {\n field: this.toFieldPathReference(filter.field),\n op: 'IS_NAN'\n }\n };\n } else if (isNullValue(filter.value)) {\n return {\n unaryFilter: {\n field: this.toFieldPathReference(filter.field),\n op: 'IS_NULL'\n }\n };\n }\n }\n return {\n fieldFilter: {\n field: this.toFieldPathReference(filter.field),\n op: this.toOperatorName(filter.op),\n value: filter.value\n }\n };\n }\n\n fromUnaryFilter(filter: api.Filter): Filter {\n switch (filter.unaryFilter!.op!) {\n case 'IS_NAN':\n const nanField = this.fromFieldPathReference(\n filter.unaryFilter!.field!\n );\n return FieldFilter.create(nanField, Operator.EQUAL, {\n doubleValue: NaN\n });\n case 'IS_NULL':\n const nullField = this.fromFieldPathReference(\n filter.unaryFilter!.field!\n );\n return FieldFilter.create(nullField, Operator.EQUAL, {\n nullValue: 'NULL_VALUE'\n });\n case 'OPERATOR_UNSPECIFIED':\n return fail('Unspecified filter');\n default:\n return fail('Unknown filter');\n }\n }\n\n toDocumentMask(fieldMask: FieldMask): api.DocumentMask {\n const canonicalFields: string[] = [];\n fieldMask.fields.forEach(field =>\n canonicalFields.push(field.canonicalString())\n );\n return {\n fieldPaths: canonicalFields\n };\n }\n\n fromDocumentMask(proto: api.DocumentMask): FieldMask {\n const paths = proto.fieldPaths || [];\n return new FieldMask(paths.map(path => FieldPath.fromServerFormat(path)));\n }\n}\n\nexport function isValidResourceName(path: ResourcePath): boolean {\n // Resource names have at least 4 components (project ID, database ID)\n return (\n path.length >= 4 &&\n path.get(0) === 'projects' &&\n path.get(2) === 'databases'\n );\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DatabaseId, DatabaseInfo } from '../core/database_info';\nimport { Connection } from '../remote/connection';\nimport { JsonProtoSerializer } from '../remote/serializer';\nimport { fail } from '../util/assert';\nimport { ConnectivityMonitor } from './../remote/connectivity_monitor';\n\n/**\n * Provides a common interface to load anything platform dependent, e.g.\n * the connection implementation.\n *\n * An implementation of this must be provided at compile time for the platform.\n */\n// TODO: Consider only exposing the APIs of 'document' and 'window' that we\n// use in our client.\nexport interface Platform {\n loadConnection(databaseInfo: DatabaseInfo): Promise<Connection>;\n newConnectivityMonitor(): ConnectivityMonitor;\n newSerializer(databaseId: DatabaseId): JsonProtoSerializer;\n\n /** Formats an object as a JSON string, suitable for logging. */\n formatJSON(value: unknown): string;\n\n /** Converts a Base64 encoded string to a binary string. 
*/\n atob(encoded: string): string;\n\n /** Converts a binary string to a Base64 encoded string. */\n btoa(raw: string): string;\n\n /**\n * Generates `nBytes` of random bytes.\n *\n * If `nBytes < 0` , an error will be thrown.\n */\n randomBytes(nBytes: number): Uint8Array;\n\n /** The Platform's 'window' implementation or null if not available. */\n readonly window: Window | null;\n\n /** The Platform's 'document' implementation or null if not available. */\n readonly document: Document | null;\n\n /** True if and only if the Base64 conversion functions are available. */\n readonly base64Available: boolean;\n}\n\n/**\n * Provides singleton helpers where setup code can inject a platform at runtime.\n * setPlatform needs to be set before Firestore is used and must be set exactly\n * once.\n */\nexport class PlatformSupport {\n private static platform: Platform;\n static setPlatform(platform: Platform): void {\n if (PlatformSupport.platform) {\n fail('Platform already defined');\n }\n PlatformSupport.platform = platform;\n }\n\n static getPlatform(): Platform {\n if (!PlatformSupport.platform) {\n fail('Platform not set');\n }\n return PlatformSupport.platform;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Logger, LogLevel } from '@firebase/logger';\nimport { SDK_VERSION } from '../core/version';\nimport { PlatformSupport } from '../platform/platform';\n\nexport { LogLevel };\n\nconst logClient = new Logger('@firebase/firestore');\n\n// Helper methods are needed because variables can't be exported as read/write\nexport function getLogLevel(): LogLevel {\n return logClient.logLevel;\n}\n\nexport function setLogLevel(newLevel: LogLevel): void {\n logClient.logLevel = newLevel;\n}\n\nexport function logDebug(msg: string, ...obj: unknown[]): void {\n if (logClient.logLevel <= LogLevel.DEBUG) {\n const args = obj.map(argToString);\n logClient.debug(`Firestore (${SDK_VERSION}): ${msg}`, ...args);\n }\n}\n\nexport function logError(msg: string, ...obj: unknown[]): void {\n if (logClient.logLevel <= LogLevel.ERROR) {\n const args = obj.map(argToString);\n logClient.error(`Firestore (${SDK_VERSION}): ${msg}`, ...args);\n }\n}\n\n/**\n * Converts an additional log parameter to a string representation.\n */\nfunction argToString(obj: unknown): string | unknown {\n if (typeof obj === 'string') {\n return obj;\n } else {\n const platform = PlatformSupport.getPlatform();\n try {\n return platform.formatJSON(obj);\n } catch (e) {\n // Converting to JSON failed, just log the object directly\n return obj;\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is 
distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SDK_VERSION } from '../core/version';\nimport { logError } from './log';\n\n/**\n * Unconditionally fails, throwing an Error with the given message.\n * Messages are stripped in production builds.\n *\n * Returns `never` and can be used in expressions:\n * @example\n * let futureVar = fail('not implemented yet');\n */\nexport function fail(failure: string = 'Unexpected state'): never {\n // Log the failure in addition to throw an exception, just in case the\n // exception is swallowed.\n const message =\n `FIRESTORE (${SDK_VERSION}) INTERNAL ASSERTION FAILED: ` + failure;\n logError(message);\n\n // NOTE: We don't use FirestoreError here because these are internal failures\n // that cannot be handled by the user. (Also it would create a circular\n // dependency between the error and assert modules which doesn't work.)\n throw new Error(message);\n}\n\n/**\n * Fails if the given assertion condition is false, throwing an Error with the\n * given message if it did.\n *\n * Messages are stripped in production builds.\n */\nexport function hardAssert(\n assertion: boolean,\n message?: string\n): asserts assertion {\n if (!assertion) {\n fail(message);\n }\n}\n\n/**\n * Fails if the given assertion condition is false, throwing an Error with the\n * given message if it did.\n *\n * The code of callsites invoking this function are stripped out in production\n * builds. Any side-effects of code within the debugAssert() invocation will not\n * happen in this case.\n */\nexport function debugAssert(\n assertion: boolean,\n message: string\n): asserts assertion {\n if (!assertion) {\n fail(message);\n }\n}\n\n/**\n * Casts `obj` to `T`. 
In non-production builds, verifies that `obj` is an\n * instance of `T` before casting.\n */\nexport function debugCast<T>(\n obj: object,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n constructor: { new (...args: any[]): T }\n): T {\n debugAssert(\n obj instanceof constructor,\n `Expected type '${constructor.name}', but was '${obj.constructor.name}'`\n );\n return obj as T;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert } from './assert';\nimport { PlatformSupport } from '../platform/platform';\n\nexport type EventHandler<E> = (value: E) => void;\nexport interface Indexable {\n [k: string]: unknown;\n}\n\nexport class AutoId {\n static newId(): string {\n // Alphanumeric characters\n const chars =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';\n // The largest byte value that is a multiple of `char.length`.\n const maxMultiple = Math.floor(256 / chars.length) * chars.length;\n debugAssert(\n 0 < maxMultiple && maxMultiple < 256,\n `Expect maxMultiple to be (0, 256), but got ${maxMultiple}`\n );\n\n let autoId = '';\n const targetLength = 20;\n while (autoId.length < targetLength) {\n const bytes = PlatformSupport.getPlatform().randomBytes(40);\n for (let i = 0; i < bytes.length; ++i) {\n // Only accept values that are [0, maxMultiple), this ensures they can\n // be evenly mapped to indices of `chars` via a modulo operation.\n if (autoId.length < targetLength && bytes[i] < maxMultiple) {\n autoId += chars.charAt(bytes[i] % chars.length);\n }\n }\n }\n debugAssert(autoId.length === targetLength, 'Invalid auto ID: ' + autoId);\n\n return autoId;\n }\n}\n\nexport function primitiveComparator<T>(left: T, right: T): number {\n if (left < right) {\n return -1;\n }\n if (left > right) {\n return 1;\n }\n return 0;\n}\n\nexport interface Equatable<T> {\n isEqual(other: T): boolean;\n}\n\n/** Helper to compare arrays using isEqual(). */\nexport function arrayEquals<T>(\n left: T[],\n right: T[],\n comparator: (l: T, r: T) => boolean\n): boolean {\n if (left.length !== right.length) {\n return false;\n }\n return left.every((value, index) => comparator(value, right[index]));\n}\n/**\n * Returns the immediate lexicographically-following string. 
This is useful to\n * construct an inclusive range for indexeddb iterators.\n */\nexport function immediateSuccessor(s: string): string {\n // Return the input string, with an additional NUL byte appended.\n return s + '\\0';\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { primitiveComparator } from '../util/misc';\n\nexport class DatabaseInfo {\n /**\n * Constructs a DatabaseInfo using the provided host, databaseId and\n * persistenceKey.\n *\n * @param databaseId The database to use.\n * @param persistenceKey A unique identifier for this Firestore's local\n * storage (used in conjunction with the databaseId).\n * @param host The Firestore backend host to connect to.\n * @param ssl Whether to use SSL when connecting.\n * @param forceLongPolling Whether to use the forceLongPolling option\n * when using WebChannel as the network transport.\n */\n constructor(\n readonly databaseId: DatabaseId,\n readonly persistenceKey: string,\n readonly host: string,\n readonly ssl: boolean,\n readonly forceLongPolling: boolean\n ) {}\n}\n\n/** The default database name for a project. */\nconst DEFAULT_DATABASE_NAME = '(default)';\n\n/** Represents the database ID a Firestore client is associated with. */\nexport class DatabaseId {\n readonly database: string;\n constructor(readonly projectId: string, database?: string) {\n this.database = database ? database : DEFAULT_DATABASE_NAME;\n }\n\n get isDefaultDatabase(): boolean {\n return this.database === DEFAULT_DATABASE_NAME;\n }\n\n isEqual(other: {}): boolean {\n return (\n other instanceof DatabaseId &&\n other.projectId === this.projectId &&\n other.database === this.database\n );\n }\n\n compareTo(other: DatabaseId): number {\n return (\n primitiveComparator(this.projectId, other.projectId) ||\n primitiveComparator(this.database, other.database)\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Equatable } from './misc';\nimport { forEach, isEmpty } from './obj';\n\ntype Entry<K, V> = [K, V];\n\n/**\n * A map implementation that uses objects as keys. Objects must implement the\n * Equatable interface and must be immutable. Entries in the map are stored\n * together with the key being produced from the mapKeyFn. 
This map\n * automatically handles collisions of keys.\n */\nexport class ObjectMap<KeyType extends Equatable<KeyType>, ValueType> {\n /**\n * The inner map for a key -> value pair. Due to the possibility of\n * collisions we keep a list of entries that we do a linear search through\n * to find an actual match. Note that collisions should be rare, so we still\n * expect near constant time lookups in practice.\n */\n private inner: {\n [canonicalId: string]: Array<Entry<KeyType, ValueType>>;\n } = {};\n\n constructor(private mapKeyFn: (key: KeyType) => string) {}\n\n /** Get a value for this key, or undefined if it does not exist. */\n get(key: KeyType): ValueType | undefined {\n const id = this.mapKeyFn(key);\n const matches = this.inner[id];\n if (matches === undefined) {\n return undefined;\n }\n for (const [otherKey, value] of matches) {\n if (otherKey.isEqual(key)) {\n return value;\n }\n }\n return undefined;\n }\n\n has(key: KeyType): boolean {\n return this.get(key) !== undefined;\n }\n\n /** Put this key and value in the map. */\n set(key: KeyType, value: ValueType): void {\n const id = this.mapKeyFn(key);\n const matches = this.inner[id];\n if (matches === undefined) {\n this.inner[id] = [[key, value]];\n return;\n }\n for (let i = 0; i < matches.length; i++) {\n if (matches[i][0].isEqual(key)) {\n matches[i] = [key, value];\n return;\n }\n }\n matches.push([key, value]);\n }\n\n /**\n * Remove this key from the map. Returns a boolean if anything was deleted.\n */\n delete(key: KeyType): boolean {\n const id = this.mapKeyFn(key);\n const matches = this.inner[id];\n if (matches === undefined) {\n return false;\n }\n for (let i = 0; i < matches.length; i++) {\n if (matches[i][0].isEqual(key)) {\n if (matches.length === 1) {\n delete this.inner[id];\n } else {\n matches.splice(i, 1);\n }\n return true;\n }\n }\n return false;\n }\n\n forEach(fn: (key: KeyType, val: ValueType) => void): void {\n forEach(this.inner, (_, entries) => {\n for (const [k, v] of entries) {\n fn(k, v);\n }\n });\n }\n\n isEmpty(): boolean {\n return isEmpty(this.inner);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Timestamp } from '../api/timestamp';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { BatchId } from '../core/types';\nimport { hardAssert, debugAssert } from '../util/assert';\nimport { arrayEquals } from '../util/misc';\nimport { ByteString } from '../util/byte_string';\nimport {\n documentKeySet,\n DocumentKeySet,\n DocumentVersionMap,\n documentVersionMap,\n MaybeDocumentMap\n} from './collections';\nimport { MaybeDocument } from './document';\nimport { DocumentKey } from './document_key';\nimport { Mutation, MutationResult } from './mutation';\n\nexport const BATCHID_UNKNOWN = -1;\n\n/**\n * A batch of mutations that will be sent as one unit to the backend.\n */\nexport class MutationBatch {\n /**\n * @param batchId The unique ID of this mutation batch.\n * @param localWriteTime The 
original write time of this mutation.\n * @param baseMutations Mutations that are used to populate the base\n * values when this mutation is applied locally. This can be used to locally\n * overwrite values that are persisted in the remote document cache. Base\n * mutations are never sent to the backend.\n * @param mutations The user-provided mutations in this mutation batch.\n * User-provided mutations are applied both locally and remotely on the\n * backend.\n */\n constructor(\n public batchId: BatchId,\n public localWriteTime: Timestamp,\n public baseMutations: Mutation[],\n public mutations: Mutation[]\n ) {\n debugAssert(mutations.length > 0, 'Cannot create an empty mutation batch');\n }\n\n /**\n * Applies all the mutations in this MutationBatch to the specified document\n * to create a new remote document\n *\n * @param docKey The key of the document to apply mutations to.\n * @param maybeDoc The document to apply mutations to.\n * @param batchResult The result of applying the MutationBatch to the\n * backend.\n */\n applyToRemoteDocument(\n docKey: DocumentKey,\n maybeDoc: MaybeDocument | null,\n batchResult: MutationBatchResult\n ): MaybeDocument | null {\n if (maybeDoc) {\n debugAssert(\n maybeDoc.key.isEqual(docKey),\n `applyToRemoteDocument: key ${docKey} should match maybeDoc key\n ${maybeDoc.key}`\n );\n }\n\n const mutationResults = batchResult.mutationResults;\n debugAssert(\n mutationResults.length === this.mutations.length,\n `Mismatch between mutations length\n (${this.mutations.length}) and mutation results length\n (${mutationResults.length}).`\n );\n\n for (let i = 0; i < this.mutations.length; i++) {\n const mutation = this.mutations[i];\n if (mutation.key.isEqual(docKey)) {\n const mutationResult = mutationResults[i];\n maybeDoc = mutation.applyToRemoteDocument(maybeDoc, mutationResult);\n }\n }\n return maybeDoc;\n }\n\n /**\n * Computes the local view of a document given all the mutations in this\n * batch.\n *\n * @param docKey The key of the document to apply mutations to.\n * @param maybeDoc The document to apply mutations to.\n */\n applyToLocalView(\n docKey: DocumentKey,\n maybeDoc: MaybeDocument | null\n ): MaybeDocument | null {\n if (maybeDoc) {\n debugAssert(\n maybeDoc.key.isEqual(docKey),\n `applyToLocalDocument: key ${docKey} should match maybeDoc key\n ${maybeDoc.key}`\n );\n }\n\n // First, apply the base state. This allows us to apply non-idempotent\n // transform against a consistent set of values.\n for (const mutation of this.baseMutations) {\n if (mutation.key.isEqual(docKey)) {\n maybeDoc = mutation.applyToLocalView(\n maybeDoc,\n maybeDoc,\n this.localWriteTime\n );\n }\n }\n\n const baseDoc = maybeDoc;\n\n // Second, apply all user-provided mutations.\n for (const mutation of this.mutations) {\n if (mutation.key.isEqual(docKey)) {\n maybeDoc = mutation.applyToLocalView(\n maybeDoc,\n baseDoc,\n this.localWriteTime\n );\n }\n }\n return maybeDoc;\n }\n\n /**\n * Computes the local view for all provided documents given the mutations in\n * this batch.\n */\n applyToLocalDocumentSet(maybeDocs: MaybeDocumentMap): MaybeDocumentMap {\n // TODO(mrschmidt): This implementation is O(n^2). 
If we apply the mutations\n // directly (as done in `applyToLocalView()`), we can reduce the complexity\n // to O(n).\n let mutatedDocuments = maybeDocs;\n this.mutations.forEach(m => {\n const mutatedDocument = this.applyToLocalView(\n m.key,\n maybeDocs.get(m.key)\n );\n if (mutatedDocument) {\n mutatedDocuments = mutatedDocuments.insert(m.key, mutatedDocument);\n }\n });\n return mutatedDocuments;\n }\n\n keys(): DocumentKeySet {\n return this.mutations.reduce(\n (keys, m) => keys.add(m.key),\n documentKeySet()\n );\n }\n\n isEqual(other: MutationBatch): boolean {\n return (\n this.batchId === other.batchId &&\n arrayEquals(this.mutations, other.mutations, (l, r) => l.isEqual(r)) &&\n arrayEquals(this.baseMutations, other.baseMutations, (l, r) =>\n l.isEqual(r)\n )\n );\n }\n}\n\n/** The result of applying a mutation batch to the backend. */\nexport class MutationBatchResult {\n private constructor(\n readonly batch: MutationBatch,\n readonly commitVersion: SnapshotVersion,\n readonly mutationResults: MutationResult[],\n readonly streamToken: ByteString,\n /**\n * A pre-computed mapping from each mutated document to the resulting\n * version.\n */\n readonly docVersions: DocumentVersionMap\n ) {}\n\n /**\n * Creates a new MutationBatchResult for the given batch and results. There\n * must be one result for each mutation in the batch. This static factory\n * caches a document=>version mapping (docVersions).\n */\n static from(\n batch: MutationBatch,\n commitVersion: SnapshotVersion,\n results: MutationResult[],\n streamToken: ByteString\n ): MutationBatchResult {\n hardAssert(\n batch.mutations.length === results.length,\n 'Mutations sent ' +\n batch.mutations.length +\n ' must equal results received ' +\n results.length\n );\n\n let versionMap = documentVersionMap();\n const mutations = batch.mutations;\n for (let i = 0; i < mutations.length; i++) {\n versionMap = versionMap.insert(mutations[i].key, results[i].version);\n }\n\n return new MutationBatchResult(\n batch,\n commitVersion,\n results,\n streamToken,\n versionMap\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { fail } from '../util/assert';\n\nexport type FulfilledHandler<T, R> =\n | ((result: T) => R | PersistencePromise<R>)\n | null;\nexport type RejectedHandler<R> =\n | ((reason: Error) => R | PersistencePromise<R>)\n | null;\nexport type Resolver<T> = (value?: T) => void;\nexport type Rejector = (error: Error) => void;\n\n/**\n * PersistencePromise<> is essentially a re-implementation of Promise<> except\n * it has a .next() method instead of .then() and .next() and .catch() callbacks\n * are executed synchronously when a PersistencePromise resolves rather than\n * asynchronously (Promise<> implementations use setImmediate() or similar).\n *\n * This is necessary to interoperate with IndexedDB which will automatically\n * commit transactions if control is returned to the event loop without\n * synchronously initiating another 
operation on the transaction.\n *\n * NOTE: .then() and .catch() only allow a single consumer, unlike normal\n * Promises.\n */\nexport class PersistencePromise<T> {\n // NOTE: next/catchCallback will always point to our own wrapper functions,\n // not the user's raw next() or catch() callbacks.\n private nextCallback: FulfilledHandler<T, unknown> = null;\n private catchCallback: RejectedHandler<unknown> = null;\n\n // When the operation resolves, we'll set result or error and mark isDone.\n private result: T | undefined = undefined;\n private error: Error | undefined = undefined;\n private isDone = false;\n\n // Set to true when .then() or .catch() are called and prevents additional\n // chaining.\n private callbackAttached = false;\n\n constructor(callback: (resolve: Resolver<T>, reject: Rejector) => void) {\n callback(\n value => {\n this.isDone = true;\n this.result = value;\n if (this.nextCallback) {\n // value should be defined unless T is Void, but we can't express\n // that in the type system.\n this.nextCallback(value!);\n }\n },\n error => {\n this.isDone = true;\n this.error = error;\n if (this.catchCallback) {\n this.catchCallback(error);\n }\n }\n );\n }\n\n catch<R>(\n fn: (error: Error) => R | PersistencePromise<R>\n ): PersistencePromise<R> {\n return this.next(undefined, fn);\n }\n\n next<R>(\n nextFn?: FulfilledHandler<T, R>,\n catchFn?: RejectedHandler<R>\n ): PersistencePromise<R> {\n if (this.callbackAttached) {\n fail('Called next() or catch() twice for PersistencePromise');\n }\n this.callbackAttached = true;\n if (this.isDone) {\n if (!this.error) {\n return this.wrapSuccess(nextFn, this.result!);\n } else {\n return this.wrapFailure(catchFn, this.error);\n }\n } else {\n return new PersistencePromise<R>((resolve, reject) => {\n this.nextCallback = (value: T) => {\n this.wrapSuccess(nextFn, value).next(resolve, reject);\n };\n this.catchCallback = (error: Error) => {\n this.wrapFailure(catchFn, error).next(resolve, reject);\n };\n });\n }\n }\n\n toPromise(): Promise<T> {\n return new Promise((resolve, reject) => {\n this.next(resolve, reject);\n });\n }\n\n private wrapUserFunction<R>(\n fn: () => R | PersistencePromise<R>\n ): PersistencePromise<R> {\n try {\n const result = fn();\n if (result instanceof PersistencePromise) {\n return result;\n } else {\n return PersistencePromise.resolve(result);\n }\n } catch (e) {\n return PersistencePromise.reject<R>(e);\n }\n }\n\n private wrapSuccess<R>(\n nextFn: FulfilledHandler<T, R> | undefined,\n value: T\n ): PersistencePromise<R> {\n if (nextFn) {\n return this.wrapUserFunction(() => nextFn(value));\n } else {\n // If there's no nextFn, then R must be the same as T\n return PersistencePromise.resolve<R>((value as unknown) as R);\n }\n }\n\n private wrapFailure<R>(\n catchFn: RejectedHandler<R> | undefined,\n error: Error\n ): PersistencePromise<R> {\n if (catchFn) {\n return this.wrapUserFunction(() => catchFn(error));\n } else {\n return PersistencePromise.reject<R>(error);\n }\n }\n\n static resolve(): PersistencePromise<void>;\n static resolve<R>(result: R): PersistencePromise<R>;\n static resolve<R>(result?: R): PersistencePromise<R | void> {\n return new PersistencePromise<R | void>((resolve, reject) => {\n resolve(result);\n });\n }\n\n static reject<R>(error: Error): PersistencePromise<R> {\n return new PersistencePromise<R>((resolve, reject) => {\n reject(error);\n });\n }\n\n static waitFor(\n // Accept all Promise types in waitFor().\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n all: { 
forEach: (cb: (el: PersistencePromise<any>) => void) => void }\n ): PersistencePromise<void> {\n return new PersistencePromise<void>((resolve, reject) => {\n let expectedCount = 0;\n let resolvedCount = 0;\n let done = false;\n\n all.forEach(element => {\n ++expectedCount;\n element.next(\n () => {\n ++resolvedCount;\n if (done && resolvedCount === expectedCount) {\n resolve();\n }\n },\n err => reject(err)\n );\n });\n\n done = true;\n if (resolvedCount === expectedCount) {\n resolve();\n }\n });\n }\n\n /**\n * Given an array of predicate functions that asynchronously evaluate to a\n * boolean, implements a short-circuiting `or` between the results. Predicates\n * will be evaluated until one of them returns `true`, then stop. The final\n * result will be whether any of them returned `true`.\n */\n static or(\n predicates: Array<() => PersistencePromise<boolean>>\n ): PersistencePromise<boolean> {\n let p: PersistencePromise<boolean> = PersistencePromise.resolve<boolean>(\n false\n );\n for (const predicate of predicates) {\n p = p.next(isTrue => {\n if (isTrue) {\n return PersistencePromise.resolve<boolean>(isTrue);\n } else {\n return predicate();\n }\n });\n }\n return p;\n }\n\n /**\n * Given an iterable, call the given function on each element in the\n * collection and wait for all of the resulting concurrent PersistencePromises\n * to resolve.\n */\n static forEach<R, S>(\n collection: { forEach: (cb: (r: R, s: S) => void) => void },\n f:\n | ((r: R, s: S) => PersistencePromise<void>)\n | ((r: R) => PersistencePromise<void>)\n ): PersistencePromise<void>;\n static forEach<R>(\n collection: { forEach: (cb: (r: R) => void) => void },\n f: (r: R) => PersistencePromise<void>\n ): PersistencePromise<void>;\n static forEach<R, S>(\n collection: { forEach: (cb: (r: R, s?: S) => void) => void },\n f: (r: R, s?: S) => PersistencePromise<void>\n ): PersistencePromise<void> {\n const promises: Array<PersistencePromise<void>> = [];\n collection.forEach((r, s) => {\n promises.push(f.call(this, r, s));\n });\n return this.waitFor(promises);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Query } from '../core/query';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport {\n DocumentKeySet,\n documentKeySet,\n DocumentMap,\n documentMap,\n MaybeDocumentMap,\n maybeDocumentMap,\n NullableMaybeDocumentMap,\n nullableMaybeDocumentMap\n} from '../model/collections';\nimport { Document, MaybeDocument, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { MutationBatch } from '../model/mutation_batch';\nimport { ResourcePath } from '../model/path';\n\nimport { debugAssert } from '../util/assert';\nimport { IndexManager } from './index_manager';\nimport { MutationQueue } from './mutation_queue';\nimport { PatchMutation } from '../model/mutation';\nimport { PersistenceTransaction } from './persistence';\nimport { PersistencePromise } from 
'./persistence_promise';\nimport { RemoteDocumentCache } from './remote_document_cache';\n\n/**\n * A readonly view of the local state of all documents we're tracking (i.e. we\n * have a cached version in remoteDocumentCache or local mutations for the\n * document). The view is computed by applying the mutations in the\n * MutationQueue to the RemoteDocumentCache.\n */\nexport class LocalDocumentsView {\n constructor(\n readonly remoteDocumentCache: RemoteDocumentCache,\n readonly mutationQueue: MutationQueue,\n readonly indexManager: IndexManager\n ) {}\n\n /**\n * Get the local view of the document identified by `key`.\n *\n * @return Local view of the document or null if we don't have any cached\n * state for it.\n */\n getDocument(\n transaction: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<MaybeDocument | null> {\n return this.mutationQueue\n .getAllMutationBatchesAffectingDocumentKey(transaction, key)\n .next(batches => this.getDocumentInternal(transaction, key, batches));\n }\n\n /** Internal version of `getDocument` that allows reusing batches. */\n private getDocumentInternal(\n transaction: PersistenceTransaction,\n key: DocumentKey,\n inBatches: MutationBatch[]\n ): PersistencePromise<MaybeDocument | null> {\n return this.remoteDocumentCache.getEntry(transaction, key).next(doc => {\n for (const batch of inBatches) {\n doc = batch.applyToLocalView(key, doc);\n }\n return doc;\n });\n }\n\n // Returns the view of the given `docs` as they would appear after applying\n // all mutations in the given `batches`.\n private applyLocalMutationsToDocuments(\n transaction: PersistenceTransaction,\n docs: NullableMaybeDocumentMap,\n batches: MutationBatch[]\n ): NullableMaybeDocumentMap {\n let results = nullableMaybeDocumentMap();\n docs.forEach((key, localView) => {\n for (const batch of batches) {\n localView = batch.applyToLocalView(key, localView);\n }\n results = results.insert(key, localView);\n });\n return results;\n }\n\n /**\n * Gets the local view of the documents identified by `keys`.\n *\n * If we don't have cached state for a document in `keys`, a NoDocument will\n * be stored for that key in the resulting set.\n */\n getDocuments(\n transaction: PersistenceTransaction,\n keys: DocumentKeySet\n ): PersistencePromise<MaybeDocumentMap> {\n return this.remoteDocumentCache\n .getEntries(transaction, keys)\n .next(docs => this.getLocalViewOfDocuments(transaction, docs));\n }\n\n /**\n * Similar to `getDocuments`, but creates the local view from the given\n * `baseDocs` without retrieving documents from the local store.\n */\n getLocalViewOfDocuments(\n transaction: PersistenceTransaction,\n baseDocs: NullableMaybeDocumentMap\n ): PersistencePromise<MaybeDocumentMap> {\n return this.mutationQueue\n .getAllMutationBatchesAffectingDocumentKeys(transaction, baseDocs)\n .next(batches => {\n const docs = this.applyLocalMutationsToDocuments(\n transaction,\n baseDocs,\n batches\n );\n let results = maybeDocumentMap();\n docs.forEach((key, maybeDoc) => {\n // TODO(http://b/32275378): Don't conflate missing / deleted.\n if (!maybeDoc) {\n maybeDoc = new NoDocument(key, SnapshotVersion.min());\n }\n results = results.insert(key, maybeDoc);\n });\n\n return results;\n });\n }\n\n /**\n * Performs a query against the local view of all documents.\n *\n * @param transaction The persistence transaction.\n * @param query The query to match documents against.\n * @param sinceReadTime If not set to SnapshotVersion.min(), return only\n * documents that have been read since this 
snapshot version (exclusive).\n */\n getDocumentsMatchingQuery(\n transaction: PersistenceTransaction,\n query: Query,\n sinceReadTime: SnapshotVersion\n ): PersistencePromise<DocumentMap> {\n if (query.isDocumentQuery()) {\n return this.getDocumentsMatchingDocumentQuery(transaction, query.path);\n } else if (query.isCollectionGroupQuery()) {\n return this.getDocumentsMatchingCollectionGroupQuery(\n transaction,\n query,\n sinceReadTime\n );\n } else {\n return this.getDocumentsMatchingCollectionQuery(\n transaction,\n query,\n sinceReadTime\n );\n }\n }\n\n private getDocumentsMatchingDocumentQuery(\n transaction: PersistenceTransaction,\n docPath: ResourcePath\n ): PersistencePromise<DocumentMap> {\n // Just do a simple document lookup.\n return this.getDocument(transaction, new DocumentKey(docPath)).next(\n maybeDoc => {\n let result = documentMap();\n if (maybeDoc instanceof Document) {\n result = result.insert(maybeDoc.key, maybeDoc);\n }\n return result;\n }\n );\n }\n\n private getDocumentsMatchingCollectionGroupQuery(\n transaction: PersistenceTransaction,\n query: Query,\n sinceReadTime: SnapshotVersion\n ): PersistencePromise<DocumentMap> {\n debugAssert(\n query.path.isEmpty(),\n 'Currently we only support collection group queries at the root.'\n );\n const collectionId = query.collectionGroup!;\n let results = documentMap();\n return this.indexManager\n .getCollectionParents(transaction, collectionId)\n .next(parents => {\n // Perform a collection query against each parent that contains the\n // collectionId and aggregate the results.\n return PersistencePromise.forEach(parents, (parent: ResourcePath) => {\n const collectionQuery = query.asCollectionQueryAtPath(\n parent.child(collectionId)\n );\n return this.getDocumentsMatchingCollectionQuery(\n transaction,\n collectionQuery,\n sinceReadTime\n ).next(r => {\n r.forEach((key, doc) => {\n results = results.insert(key, doc);\n });\n });\n }).next(() => results);\n });\n }\n\n private getDocumentsMatchingCollectionQuery(\n transaction: PersistenceTransaction,\n query: Query,\n sinceReadTime: SnapshotVersion\n ): PersistencePromise<DocumentMap> {\n // Query the remote documents and overlay mutations.\n let results: DocumentMap;\n let mutationBatches: MutationBatch[];\n return this.remoteDocumentCache\n .getDocumentsMatchingQuery(transaction, query, sinceReadTime)\n .next(queryResults => {\n results = queryResults;\n return this.mutationQueue.getAllMutationBatchesAffectingQuery(\n transaction,\n query\n );\n })\n .next(matchingMutationBatches => {\n mutationBatches = matchingMutationBatches;\n // It is possible that a PatchMutation can make a document match a query, even if\n // the version in the RemoteDocumentCache is not a match yet (waiting for server\n // to ack). 
To handle this, we find all document keys affected by the PatchMutations\n // that are not in `result` yet, and back fill them via `remoteDocumentCache.getEntries`,\n // otherwise those `PatchMutations` will be ignored because no base document can be found,\n // and lead to missing result for the query.\n return this.addMissingBaseDocuments(\n transaction,\n mutationBatches,\n results\n ).next(mergedDocuments => {\n results = mergedDocuments;\n\n for (const batch of mutationBatches) {\n for (const mutation of batch.mutations) {\n const key = mutation.key;\n const baseDoc = results.get(key);\n const mutatedDoc = mutation.applyToLocalView(\n baseDoc,\n baseDoc,\n batch.localWriteTime\n );\n if (mutatedDoc instanceof Document) {\n results = results.insert(key, mutatedDoc);\n } else {\n results = results.remove(key);\n }\n }\n }\n });\n })\n .next(() => {\n // Finally, filter out any documents that don't actually match\n // the query.\n results.forEach((key, doc) => {\n if (!query.matches(doc)) {\n results = results.remove(key);\n }\n });\n\n return results;\n });\n }\n\n private addMissingBaseDocuments(\n transaction: PersistenceTransaction,\n matchingMutationBatches: MutationBatch[],\n existingDocuments: DocumentMap\n ): PersistencePromise<DocumentMap> {\n let missingBaseDocEntriesForPatching = documentKeySet();\n for (const batch of matchingMutationBatches) {\n for (const mutation of batch.mutations) {\n if (\n mutation instanceof PatchMutation &&\n existingDocuments.get(mutation.key) === null\n ) {\n missingBaseDocEntriesForPatching = missingBaseDocEntriesForPatching.add(\n mutation.key\n );\n }\n }\n }\n\n let mergedDocuments = existingDocuments;\n return this.remoteDocumentCache\n .getEntries(transaction, missingBaseDocEntriesForPatching)\n .next(missingBaseDocs => {\n missingBaseDocs.forEach((key, doc) => {\n if (doc !== null && doc instanceof Document) {\n mergedDocuments = mergedDocuments.insert(key, doc);\n }\n });\n return mergedDocuments;\n });\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TargetId } from '../core/types';\nimport { ChangeType, ViewSnapshot } from '../core/view_snapshot';\nimport { documentKeySet, DocumentKeySet } from '../model/collections';\n\n/**\n * A set of changes to what documents are currently in view and out of view for\n * a given query. 
These changes are sent to the LocalStore by the View (via\n * the SyncEngine) and are used to pin / unpin documents as appropriate.\n */\nexport class LocalViewChanges {\n constructor(\n readonly targetId: TargetId,\n readonly fromCache: boolean,\n readonly addedKeys: DocumentKeySet,\n readonly removedKeys: DocumentKeySet\n ) {}\n\n static fromSnapshot(\n targetId: TargetId,\n viewSnapshot: ViewSnapshot\n ): LocalViewChanges {\n let addedKeys = documentKeySet();\n let removedKeys = documentKeySet();\n\n for (const docChange of viewSnapshot.docChanges) {\n switch (docChange.type) {\n case ChangeType.Added:\n addedKeys = addedKeys.add(docChange.doc.key);\n break;\n case ChangeType.Removed:\n removedKeys = removedKeys.add(docChange.doc.key);\n break;\n default:\n // do nothing\n }\n }\n\n return new LocalViewChanges(\n targetId,\n viewSnapshot.fromCache,\n addedKeys,\n removedKeys\n );\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ListenSequenceNumber } from './types';\n\n/**\n * `SequenceNumberSyncer` defines the methods required to keep multiple instances of a\n * `ListenSequence` in sync.\n */\nexport interface SequenceNumberSyncer {\n // Notify the syncer that a new sequence number has been used.\n writeSequenceNumber(sequenceNumber: ListenSequenceNumber): void;\n // Setting this property allows the syncer to notify when a sequence number has been used, and\n // and lets the ListenSequence adjust its internal previous value accordingly.\n sequenceNumberHandler:\n | ((sequenceNumber: ListenSequenceNumber) => void)\n | null;\n}\n\n/**\n * `ListenSequence` is a monotonic sequence. It is initialized with a minimum value to\n * exceed. All subsequent calls to next will return increasing values. 
If provided with a\n * `SequenceNumberSyncer`, it will additionally bump its next value when told of a new value, as\n * well as write out sequence numbers that it produces via `next()`.\n */\nexport class ListenSequence {\n static readonly INVALID: ListenSequenceNumber = -1;\n\n private writeNewSequenceNumber?: (\n newSequenceNumber: ListenSequenceNumber\n ) => void;\n\n constructor(\n private previousValue: ListenSequenceNumber,\n sequenceNumberSyncer?: SequenceNumberSyncer\n ) {\n if (sequenceNumberSyncer) {\n sequenceNumberSyncer.sequenceNumberHandler = sequenceNumber =>\n this.setPreviousValue(sequenceNumber);\n this.writeNewSequenceNumber = sequenceNumber =>\n sequenceNumberSyncer.writeSequenceNumber(sequenceNumber);\n }\n }\n\n private setPreviousValue(\n externalPreviousValue: ListenSequenceNumber\n ): ListenSequenceNumber {\n this.previousValue = Math.max(externalPreviousValue, this.previousValue);\n return this.previousValue;\n }\n\n next(): ListenSequenceNumber {\n const nextValue = ++this.previousValue;\n if (this.writeNewSequenceNumber) {\n this.writeNewSequenceNumber(nextValue);\n }\n return nextValue;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Resolver<R> {\n (value?: R | Promise<R>): void;\n}\n\nexport interface Rejecter {\n (reason?: Error): void;\n}\n\nexport class Deferred<R> {\n promise: Promise<R>;\n // Assigned synchronously in constructor by Promise constructor callback.\n resolve!: Resolver<R>;\n reject!: Rejecter;\n\n constructor() {\n this.promise = new Promise((resolve: Resolver<R>, reject: Rejecter) => {\n this.resolve = resolve;\n this.reject = reject;\n });\n }\n}\n\n/**\n * Takes an array of values and a function from a value to a Promise. 
The function is run on each\n * value sequentially, waiting for the previous promise to resolve before starting the next one.\n * The returned promise resolves once the function has been run on all values.\n */\nexport function sequence<T>(\n values: T[],\n fn: (value: T) => Promise<void>\n): Promise<void> {\n let p = Promise.resolve();\n for (const value of values) {\n p = p.then(() => fn(value));\n }\n return p;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { AsyncQueue, DelayedOperation, TimerId } from '../util/async_queue';\nimport { logDebug } from '../util/log';\n\nconst LOG_TAG = 'ExponentialBackoff';\n\n/**\n * Initial backoff time in milliseconds after an error.\n * Set to 1s according to https://cloud.google.com/apis/design/errors.\n */\nconst DEFAULT_BACKOFF_INITIAL_DELAY_MS = 1000;\n\nconst DEFAULT_BACKOFF_FACTOR = 1.5;\n\n/** Maximum backoff time in milliseconds */\nconst DEFAULT_BACKOFF_MAX_DELAY_MS = 60 * 1000;\n\n/**\n * A helper for running delayed tasks following an exponential backoff curve\n * between attempts.\n *\n * Each delay is made up of a \"base\" delay which follows the exponential\n * backoff curve, and a +/- 50% \"jitter\" that is calculated and added to the\n * base delay. This prevents clients from accidentally synchronizing their\n * delays causing spikes of load to the backend.\n */\nexport class ExponentialBackoff {\n private currentBaseMs: number = 0;\n private timerPromise: DelayedOperation<void> | null = null;\n /** The last backoff attempt, as epoch milliseconds. */\n private lastAttemptTime = Date.now();\n\n constructor(\n /**\n * The AsyncQueue to run backoff operations on.\n */\n private readonly queue: AsyncQueue,\n /**\n * The ID to use when scheduling backoff operations on the AsyncQueue.\n */\n private readonly timerId: TimerId,\n /**\n * The initial delay (used as the base delay on the first retry attempt).\n * Note that jitter will still be applied, so the actual delay could be as\n * little as 0.5*initialDelayMs.\n */\n private readonly initialDelayMs: number = DEFAULT_BACKOFF_INITIAL_DELAY_MS,\n /**\n * The multiplier to use to determine the extended base delay after each\n * attempt.\n */\n private readonly backoffFactor: number = DEFAULT_BACKOFF_FACTOR,\n /**\n * The maximum base delay after which no further backoff is performed.\n * Note that jitter will still be applied, so the actual delay could be as\n * much as 1.5*maxDelayMs.\n */\n private readonly maxDelayMs: number = DEFAULT_BACKOFF_MAX_DELAY_MS\n ) {\n this.reset();\n }\n\n /**\n * Resets the backoff delay.\n *\n * The very next backoffAndWait() will have no delay. If it is called again\n * (i.e. due to an error), initialDelayMs (plus jitter) will be used, and\n * subsequent ones will increase according to the backoffFactor.\n */\n reset(): void {\n this.currentBaseMs = 0;\n }\n\n /**\n * Resets the backoff delay to the maximum delay (e.g. 
for use after a\n * RESOURCE_EXHAUSTED error).\n */\n resetToMax(): void {\n this.currentBaseMs = this.maxDelayMs;\n }\n\n /**\n * Returns a promise that resolves after currentDelayMs, and increases the\n * delay for any subsequent attempts. If there was a pending backoff operation\n * already, it will be canceled.\n */\n backoffAndRun(op: () => Promise<void>): void {\n // Cancel any pending backoff operation.\n this.cancel();\n\n // First schedule using the current base (which may be 0 and should be\n // honored as such).\n const desiredDelayWithJitterMs = Math.floor(\n this.currentBaseMs + this.jitterDelayMs()\n );\n\n // Guard against lastAttemptTime being in the future due to a clock change.\n const delaySoFarMs = Math.max(0, Date.now() - this.lastAttemptTime);\n\n // Guard against the backoff delay already being past.\n const remainingDelayMs = Math.max(\n 0,\n desiredDelayWithJitterMs - delaySoFarMs\n );\n\n if (remainingDelayMs > 0) {\n logDebug(\n LOG_TAG,\n `Backing off for ${remainingDelayMs} ms ` +\n `(base delay: ${this.currentBaseMs} ms, ` +\n `delay with jitter: ${desiredDelayWithJitterMs} ms, ` +\n `last attempt: ${delaySoFarMs} ms ago)`\n );\n }\n\n this.timerPromise = this.queue.enqueueAfterDelay(\n this.timerId,\n remainingDelayMs,\n () => {\n this.lastAttemptTime = Date.now();\n return op();\n }\n );\n\n // Apply backoff factor to determine next delay and ensure it is within\n // bounds.\n this.currentBaseMs *= this.backoffFactor;\n if (this.currentBaseMs < this.initialDelayMs) {\n this.currentBaseMs = this.initialDelayMs;\n }\n if (this.currentBaseMs > this.maxDelayMs) {\n this.currentBaseMs = this.maxDelayMs;\n }\n }\n\n skipBackoff(): void {\n if (this.timerPromise !== null) {\n this.timerPromise.skipDelay();\n this.timerPromise = null;\n }\n }\n\n cancel(): void {\n if (this.timerPromise !== null) {\n this.timerPromise.cancel();\n this.timerPromise = null;\n }\n }\n\n /** Returns a random value in the range [-currentBaseMs/2, currentBaseMs/2] */\n private jitterDelayMs(): number {\n return (Math.random() - 0.5) * this.currentBaseMs;\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ResourcePath } from '../model/path';\nimport { debugAssert } from '../util/assert';\nimport { SortedSet } from '../util/sorted_set';\nimport { IndexManager } from './index_manager';\nimport { PersistenceTransaction } from './persistence';\nimport { PersistencePromise } from './persistence_promise';\n\n/**\n * An in-memory implementation of IndexManager.\n */\nexport class MemoryIndexManager implements IndexManager {\n private collectionParentIndex = new MemoryCollectionParentIndex();\n\n addToCollectionParentIndex(\n transaction: PersistenceTransaction,\n collectionPath: ResourcePath\n ): PersistencePromise<void> {\n this.collectionParentIndex.add(collectionPath);\n return PersistencePromise.resolve();\n }\n\n getCollectionParents(\n transaction: PersistenceTransaction,\n collectionId: string\n ): 
PersistencePromise<ResourcePath[]> {\n return PersistencePromise.resolve(\n this.collectionParentIndex.getEntries(collectionId)\n );\n }\n}\n\n/**\n * Internal implementation of the collection-parent index exposed by MemoryIndexManager.\n * Also used for in-memory caching by IndexedDbIndexManager and initial index population\n * in indexeddb_schema.ts\n */\nexport class MemoryCollectionParentIndex {\n private index = {} as {\n [collectionId: string]: SortedSet<ResourcePath>;\n };\n\n // Returns false if the entry already existed.\n add(collectionPath: ResourcePath): boolean {\n debugAssert(collectionPath.length % 2 === 1, 'Expected a collection path.');\n const collectionId = collectionPath.lastSegment();\n const parentPath = collectionPath.popLast();\n const existingParents =\n this.index[collectionId] ||\n new SortedSet<ResourcePath>(ResourcePath.comparator);\n const added = !existingParents.has(parentPath);\n this.index[collectionId] = existingParents.add(parentPath);\n return added;\n }\n\n has(collectionPath: ResourcePath): boolean {\n const collectionId = collectionPath.lastSegment();\n const parentPath = collectionPath.popLast();\n const existingParents = this.index[collectionId];\n return existingParents && existingParents.has(parentPath);\n }\n\n getEntries(collectionId: string): ResourcePath[] {\n const parentPaths =\n this.index[collectionId] ||\n new SortedSet<ResourcePath>(ResourcePath.comparator);\n return parentPaths.toArray();\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TargetId } from './types';\n\n/** Offset to ensure non-overlapping target ids. */\nconst OFFSET = 2;\n\n/**\n * Generates monotonically increasing target IDs for sending targets to the\n * watch stream.\n *\n * The client constructs two generators, one for the target cache, and one for\n * for the sync engine (to generate limbo documents targets). These\n * generators produce non-overlapping IDs (by using even and odd IDs\n * respectively).\n *\n * By separating the target ID space, the query cache can generate target IDs\n * that persist across client restarts, while sync engine can independently\n * generate in-memory target IDs that are transient and can be reused after a\n * restart.\n */\nexport class TargetIdGenerator {\n constructor(private lastId: number) {}\n\n next(): TargetId {\n this.lastId += OFFSET;\n return this.lastId;\n }\n\n static forTargetCache(): TargetIdGenerator {\n // The target cache generator must return '2' in its first call to `next()`\n // as there is no differentiation in the protocol layer between an unset\n // number and the number '0'. 
If we were to sent a target with target ID\n // '0', the backend would consider it unset and replace it with its own ID.\n return new TargetIdGenerator(2 - OFFSET);\n }\n\n static forSyncEngine(): TargetIdGenerator {\n // Sync engine assigns target IDs for limbo document detection.\n return new TargetIdGenerator(1 - OFFSET);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getUA } from '@firebase/util';\nimport { debugAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { logDebug, logError } from '../util/log';\nimport { Deferred } from '../util/promise';\nimport { SCHEMA_VERSION } from './indexeddb_schema';\nimport { PersistencePromise } from './persistence_promise';\n\n// References to `window` are guarded by SimpleDb.isAvailable()\n/* eslint-disable no-restricted-globals */\n\nconst LOG_TAG = 'SimpleDb';\n\n/**\n * The maximum number of retry attempts for an IndexedDb transaction that fails\n * with a DOMException.\n */\nconst TRANSACTION_RETRY_COUNT = 3;\n\n// The different modes supported by `SimpleDb.runTransaction()`\ntype SimpleDbTransactionMode = 'readonly' | 'readwrite';\n\nexport interface SimpleDbSchemaConverter {\n createOrUpgrade(\n db: IDBDatabase,\n txn: IDBTransaction,\n fromVersion: number,\n toVersion: number\n ): PersistencePromise<void>;\n}\n\n/**\n * Provides a wrapper around IndexedDb with a simplified interface that uses\n * Promise-like return values to chain operations. Real promises cannot be used\n * since .then() continuations are executed asynchronously (e.g. via\n * .setImmediate), which would cause IndexedDB to end the transaction.\n * See PersistencePromise for more details.\n */\nexport class SimpleDb {\n /**\n * Opens the specified database, creating or upgrading it if necessary.\n *\n * Note that `version` must not be a downgrade. IndexedDB does not support downgrading the schema\n * version. We currently do not support any way to do versioning outside of IndexedDB's versioning\n * mechanism, as only version-upgrade transactions are allowed to do things like create\n * objectstores.\n */\n static openOrCreate(\n name: string,\n version: number,\n schemaConverter: SimpleDbSchemaConverter\n ): Promise<SimpleDb> {\n debugAssert(\n SimpleDb.isAvailable(),\n 'IndexedDB not supported in current environment.'\n );\n logDebug(LOG_TAG, 'Opening database:', name);\n return new PersistencePromise<SimpleDb>((resolve, reject) => {\n // TODO(mikelehen): Investigate browser compatibility.\n // https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API/Using_IndexedDB\n // suggests IE9 and older WebKit browsers handle upgrade\n // differently. 
They expect setVersion, as described here:\n // https://developer.mozilla.org/en-US/docs/Web/API/IDBVersionChangeRequest/setVersion\n const request = window.indexedDB.open(name, version);\n\n request.onsuccess = (event: Event) => {\n const db = (event.target as IDBOpenDBRequest).result;\n resolve(new SimpleDb(db));\n };\n\n request.onblocked = () => {\n reject(\n new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'Cannot upgrade IndexedDB schema while another tab is open. ' +\n 'Close all tabs that access Firestore and reload this page to proceed.'\n )\n );\n };\n\n request.onerror = (event: Event) => {\n const error: DOMException = (event.target as IDBOpenDBRequest).error!;\n if (error.name === 'VersionError') {\n reject(\n new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'A newer version of the Firestore SDK was previously used and so the persisted ' +\n 'data is not compatible with the version of the SDK you are now using. The SDK ' +\n 'will operate with persistence disabled. If you need persistence, please ' +\n 're-upgrade to a newer version of the SDK or else clear the persisted IndexedDB ' +\n 'data for your app to start fresh.'\n )\n );\n } else {\n reject(error);\n }\n };\n\n request.onupgradeneeded = (event: IDBVersionChangeEvent) => {\n logDebug(\n LOG_TAG,\n 'Database \"' + name + '\" requires upgrade from version:',\n event.oldVersion\n );\n const db = (event.target as IDBOpenDBRequest).result;\n schemaConverter\n .createOrUpgrade(\n db,\n request.transaction!,\n event.oldVersion,\n SCHEMA_VERSION\n )\n .next(() => {\n logDebug(\n LOG_TAG,\n 'Database upgrade to version ' + SCHEMA_VERSION + ' complete'\n );\n });\n };\n }).toPromise();\n }\n\n /** Deletes the specified database. */\n static delete(name: string): Promise<void> {\n logDebug(LOG_TAG, 'Removing database:', name);\n return wrapRequest<void>(window.indexedDB.deleteDatabase(name)).toPromise();\n }\n\n /** Returns true if IndexedDB is available in the current environment. */\n static isAvailable(): boolean {\n if (typeof window === 'undefined' || window.indexedDB == null) {\n return false;\n }\n\n if (SimpleDb.isMockPersistence()) {\n return true;\n }\n\n // In some Node environments, `window` is defined, but `window.navigator` is\n // not. We don't support IndexedDB persistence in Node if the\n // isMockPersistence() check above returns false.\n if (window.navigator === undefined) {\n return false;\n }\n\n // We extensively use indexed array values and compound keys,\n // which IE and Edge do not support. 
However, they still have indexedDB\n // defined on the window, so we need to check for them here and make sure\n // to return that persistence is not enabled for those browsers.\n // For tracking support of this feature, see here:\n // https://developer.microsoft.com/en-us/microsoft-edge/platform/status/indexeddbarraysandmultientrysupport/\n\n // Check the UA string to find out the browser.\n const ua = getUA();\n\n // IE 10\n // ua = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)';\n\n // IE 11\n // ua = 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko';\n\n // Edge\n // ua = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML,\n // like Gecko) Chrome/39.0.2171.71 Safari/537.36 Edge/12.0';\n\n // iOS Safari: Disable for users running iOS version < 10.\n const iOSVersion = SimpleDb.getIOSVersion(ua);\n const isUnsupportedIOS = 0 < iOSVersion && iOSVersion < 10;\n\n // Android browser: Disable for userse running version < 4.5.\n const androidVersion = SimpleDb.getAndroidVersion(ua);\n const isUnsupportedAndroid = 0 < androidVersion && androidVersion < 4.5;\n\n if (\n ua.indexOf('MSIE ') > 0 ||\n ua.indexOf('Trident/') > 0 ||\n ua.indexOf('Edge/') > 0 ||\n isUnsupportedIOS ||\n isUnsupportedAndroid\n ) {\n return false;\n } else {\n return true;\n }\n }\n\n /**\n * Returns true if the backing IndexedDB store is the Node IndexedDBShim\n * (see https://github.com/axemclion/IndexedDBShim).\n */\n static isMockPersistence(): boolean {\n return (\n typeof process !== 'undefined' &&\n process.env?.USE_MOCK_PERSISTENCE === 'YES'\n );\n }\n\n /** Helper to get a typed SimpleDbStore from a transaction. */\n static getStore<KeyType extends IDBValidKey, ValueType extends unknown>(\n txn: SimpleDbTransaction,\n store: string\n ): SimpleDbStore<KeyType, ValueType> {\n return txn.store<KeyType, ValueType>(store);\n }\n\n // visible for testing\n /** Parse User Agent to determine iOS version. Returns -1 if not found. */\n static getIOSVersion(ua: string): number {\n const iOSVersionRegex = ua.match(/i(?:phone|pad|pod) os ([\\d_]+)/i);\n const version = iOSVersionRegex\n ? iOSVersionRegex[1]\n .split('_')\n .slice(0, 2)\n .join('.')\n : '-1';\n return Number(version);\n }\n\n // visible for testing\n /** Parse User Agent to determine Android version. Returns -1 if not found. */\n static getAndroidVersion(ua: string): number {\n const androidVersionRegex = ua.match(/Android ([\\d.]+)/i);\n const version = androidVersionRegex\n ? androidVersionRegex[1]\n .split('.')\n .slice(0, 2)\n .join('.')\n : '-1';\n return Number(version);\n }\n\n constructor(private db: IDBDatabase) {\n const iOSVersion = SimpleDb.getIOSVersion(getUA());\n // NOTE: According to https://bugs.webkit.org/show_bug.cgi?id=197050, the\n // bug we're checking for should exist in iOS >= 12.2 and < 13, but for\n // whatever reason it's much harder to hit after 12.2 so we only proactively\n // log on 12.2.\n if (iOSVersion === 12.2) {\n logError(\n 'Firestore persistence suffers from a bug in iOS 12.2 ' +\n 'Safari that may cause your app to stop working. 
See ' +\n 'https://stackoverflow.com/q/56496296/110915 for details ' +\n 'and a potential workaround.'\n );\n }\n }\n\n setVersionChangeListener(\n versionChangeListener: (event: IDBVersionChangeEvent) => void\n ): void {\n this.db.onversionchange = (event: IDBVersionChangeEvent) => {\n return versionChangeListener(event);\n };\n }\n\n async runTransaction<T>(\n mode: SimpleDbTransactionMode,\n objectStores: string[],\n transactionFn: (transaction: SimpleDbTransaction) => PersistencePromise<T>\n ): Promise<T> {\n const readonly = mode === 'readonly';\n let attemptNumber = 0;\n\n while (true) {\n ++attemptNumber;\n\n const transaction = SimpleDbTransaction.open(\n this.db,\n readonly ? 'readonly' : 'readwrite',\n objectStores\n );\n try {\n const transactionFnResult = transactionFn(transaction)\n .catch(error => {\n // Abort the transaction if there was an error.\n transaction.abort(error);\n // We cannot actually recover, and calling `abort()` will cause the transaction's\n // completion promise to be rejected. This in turn means that we won't use\n // `transactionFnResult` below. We return a rejection here so that we don't add the\n // possibility of returning `void` to the type of `transactionFnResult`.\n return PersistencePromise.reject<T>(error);\n })\n .toPromise();\n\n // As noted above, errors are propagated by aborting the transaction. So\n // we swallow any error here to avoid the browser logging it as unhandled.\n transactionFnResult.catch(() => {});\n\n // Wait for the transaction to complete (i.e. IndexedDb's onsuccess event to\n // fire), but still return the original transactionFnResult back to the\n // caller.\n await transaction.completionPromise;\n return transactionFnResult;\n } catch (error) {\n // TODO(schmidt-sebastian): We could probably be smarter about this and\n // not retry exceptions that are likely unrecoverable (such as quota\n // exceeded errors).\n\n // Note: We cannot use an instanceof check for FirestoreException, since the\n // exception is wrapped in a generic error by our async/await handling.\n const retryable =\n error.name !== 'FirebaseError' &&\n attemptNumber < TRANSACTION_RETRY_COUNT;\n logDebug(\n LOG_TAG,\n 'Transaction failed with error: %s. Retrying: %s.',\n error.message,\n retryable\n );\n\n if (!retryable) {\n return Promise.reject(error);\n }\n }\n }\n }\n\n close(): void {\n this.db.close();\n }\n}\n\n/**\n * A controller for iterating over a key range or index. 
It allows an iterate\n * callback to delete the currently-referenced object, or jump to a new key\n * within the key range or index.\n */\nexport class IterationController {\n private shouldStop = false;\n private nextKey: IDBValidKey | null = null;\n\n constructor(private dbCursor: IDBCursorWithValue) {}\n\n get isDone(): boolean {\n return this.shouldStop;\n }\n\n get skipToKey(): IDBValidKey | null {\n return this.nextKey;\n }\n\n set cursor(value: IDBCursorWithValue) {\n this.dbCursor = value;\n }\n\n /**\n * This function can be called to stop iteration at any point.\n */\n done(): void {\n this.shouldStop = true;\n }\n\n /**\n * This function can be called to skip to that next key, which could be\n * an index or a primary key.\n */\n skip(key: IDBValidKey): void {\n this.nextKey = key;\n }\n\n /**\n * Delete the current cursor value from the object store.\n *\n * NOTE: You CANNOT do this with a keysOnly query.\n */\n delete(): PersistencePromise<void> {\n return wrapRequest<void>(this.dbCursor.delete());\n }\n}\n\n/**\n * Callback used with iterate() method.\n */\nexport type IterateCallback<KeyType, ValueType> = (\n key: KeyType,\n value: ValueType,\n control: IterationController\n) => void | PersistencePromise<void>;\n\n/** Options available to the iterate() method. */\nexport interface IterateOptions {\n /** Index to iterate over (else primary keys will be iterated) */\n index?: string;\n\n /** IndxedDB Range to iterate over (else entire store will be iterated) */\n range?: IDBKeyRange;\n\n /** If true, values aren't read while iterating. */\n keysOnly?: boolean;\n\n /** If true, iterate over the store in reverse. */\n reverse?: boolean;\n}\n\n/** An error that wraps exceptions that thrown during IndexedDB execution. */\nexport class IndexedDbTransactionError extends FirestoreError {\n name = 'IndexedDbTransactionError';\n\n constructor(cause: Error) {\n super(Code.UNAVAILABLE, 'IndexedDB transaction failed: ' + cause);\n }\n}\n\n/** Verifies whether `e` is an IndexedDbTransactionError. 
*/\nexport function isIndexedDbTransactionError(e: Error): boolean {\n // Use name equality, as instanceof checks on errors don't work with errors\n // that wrap other errors.\n return e.name === 'IndexedDbTransactionError';\n}\n\n/**\n * Wraps an IDBTransaction and exposes a store() method to get a handle to a\n * specific object store.\n */\nexport class SimpleDbTransaction {\n private aborted = false;\n\n /**\n * A promise that resolves with the result of the IndexedDb transaction.\n */\n private readonly completionDeferred = new Deferred<void>();\n\n static open(\n db: IDBDatabase,\n mode: IDBTransactionMode,\n objectStoreNames: string[]\n ): SimpleDbTransaction {\n return new SimpleDbTransaction(db.transaction(objectStoreNames, mode));\n }\n\n constructor(private readonly transaction: IDBTransaction) {\n this.transaction.oncomplete = () => {\n this.completionDeferred.resolve();\n };\n this.transaction.onabort = () => {\n if (transaction.error) {\n this.completionDeferred.reject(\n new IndexedDbTransactionError(transaction.error)\n );\n } else {\n this.completionDeferred.resolve();\n }\n };\n this.transaction.onerror = (event: Event) => {\n const error = checkForAndReportiOSError(\n (event.target as IDBRequest).error!\n );\n this.completionDeferred.reject(new IndexedDbTransactionError(error));\n };\n }\n\n get completionPromise(): Promise<void> {\n return this.completionDeferred.promise;\n }\n\n abort(error?: Error): void {\n if (error) {\n this.completionDeferred.reject(error);\n }\n\n if (!this.aborted) {\n logDebug(\n LOG_TAG,\n 'Aborting transaction:',\n error ? error.message : 'Client-initiated abort'\n );\n this.aborted = true;\n this.transaction.abort();\n }\n }\n\n /**\n * Returns a SimpleDbStore<KeyType, ValueType> for the specified store. All\n * operations performed on the SimpleDbStore happen within the context of this\n * transaction and it cannot be used anymore once the transaction is\n * completed.\n *\n * Note that we can't actually enforce that the KeyType and ValueType are\n * correct, but they allow type safety through the rest of the consuming code.\n */\n store<KeyType extends IDBValidKey, ValueType extends unknown>(\n storeName: string\n ): SimpleDbStore<KeyType, ValueType> {\n const store = this.transaction.objectStore(storeName);\n debugAssert(!!store, 'Object store not part of transaction: ' + storeName);\n return new SimpleDbStore<KeyType, ValueType>(store);\n }\n}\n\n/**\n * A wrapper around an IDBObjectStore providing an API that:\n *\n * 1) Has generic KeyType / ValueType parameters to provide strongly-typed\n * methods for acting against the object store.\n * 2) Deals with IndexedDB's onsuccess / onerror event callbacks, making every\n * method return a PersistencePromise instead.\n * 3) Provides a higher-level API to avoid needing to do excessive wrapping of\n * intermediate IndexedDB types (IDBCursorWithValue, etc.)\n */\nexport class SimpleDbStore<\n KeyType extends IDBValidKey,\n ValueType extends unknown\n> {\n constructor(private store: IDBObjectStore) {}\n\n /**\n * Writes a value into the Object Store.\n *\n * @param key Optional explicit key to use when writing the object, else the\n * key will be auto-assigned (e.g. 
via the defined keyPath for the store).\n * @param value The object to write.\n */\n put(value: ValueType): PersistencePromise<void>;\n put(key: KeyType, value: ValueType): PersistencePromise<void>;\n put(\n keyOrValue: KeyType | ValueType,\n value?: ValueType\n ): PersistencePromise<void> {\n let request;\n if (value !== undefined) {\n logDebug(LOG_TAG, 'PUT', this.store.name, keyOrValue, value);\n request = this.store.put(value, keyOrValue as KeyType);\n } else {\n logDebug(LOG_TAG, 'PUT', this.store.name, '<auto-key>', keyOrValue);\n request = this.store.put(keyOrValue as ValueType);\n }\n return wrapRequest<void>(request);\n }\n\n /**\n * Adds a new value into an Object Store and returns the new key. Similar to\n * IndexedDb's `add()`, this method will fail on primary key collisions.\n *\n * @param value The object to write.\n * @return The key of the value to add.\n */\n add(value: ValueType): PersistencePromise<KeyType> {\n logDebug(LOG_TAG, 'ADD', this.store.name, value, value);\n const request = this.store.add(value as ValueType);\n return wrapRequest<KeyType>(request);\n }\n\n /**\n * Gets the object with the specified key from the specified store, or null\n * if no object exists with the specified key.\n *\n * @key The key of the object to get.\n * @return The object with the specified key or null if no object exists.\n */\n get(key: KeyType): PersistencePromise<ValueType | null> {\n const request = this.store.get(key);\n // We're doing an unsafe cast to ValueType.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return wrapRequest<any>(request).next(result => {\n // Normalize nonexistence to null.\n if (result === undefined) {\n result = null;\n }\n logDebug(LOG_TAG, 'GET', this.store.name, key, result);\n return result;\n });\n }\n\n delete(key: KeyType | IDBKeyRange): PersistencePromise<void> {\n logDebug(LOG_TAG, 'DELETE', this.store.name, key);\n const request = this.store.delete(key);\n return wrapRequest<void>(request);\n }\n\n /**\n * If we ever need more of the count variants, we can add overloads. 
For now,\n * all we need is to count everything in a store.\n *\n * Returns the number of rows in the store.\n */\n count(): PersistencePromise<number> {\n logDebug(LOG_TAG, 'COUNT', this.store.name);\n const request = this.store.count();\n return wrapRequest<number>(request);\n }\n\n loadAll(): PersistencePromise<ValueType[]>;\n loadAll(range: IDBKeyRange): PersistencePromise<ValueType[]>;\n loadAll(index: string, range: IDBKeyRange): PersistencePromise<ValueType[]>;\n loadAll(\n indexOrRange?: string | IDBKeyRange,\n range?: IDBKeyRange\n ): PersistencePromise<ValueType[]> {\n const cursor = this.cursor(this.options(indexOrRange, range));\n const results: ValueType[] = [];\n return this.iterateCursor(cursor, (key, value) => {\n results.push(value);\n }).next(() => {\n return results;\n });\n }\n\n deleteAll(): PersistencePromise<void>;\n deleteAll(range: IDBKeyRange): PersistencePromise<void>;\n deleteAll(index: string, range: IDBKeyRange): PersistencePromise<void>;\n deleteAll(\n indexOrRange?: string | IDBKeyRange,\n range?: IDBKeyRange\n ): PersistencePromise<void> {\n logDebug(LOG_TAG, 'DELETE ALL', this.store.name);\n const options = this.options(indexOrRange, range);\n options.keysOnly = false;\n const cursor = this.cursor(options);\n return this.iterateCursor(cursor, (key, value, control) => {\n // NOTE: Calling delete() on a cursor is documented as more efficient than\n // calling delete() on an object store with a single key\n // (https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/delete),\n // however, this requires us *not* to use a keysOnly cursor\n // (https://developer.mozilla.org/en-US/docs/Web/API/IDBCursor/delete). We\n // may want to compare the performance of each method.\n return control.delete();\n });\n }\n\n /**\n * Iterates over keys and values in an object store.\n *\n * @param options Options specifying how to iterate the objects in the store.\n * @param callback will be called for each iterated object. Iteration can be\n * canceled at any point by calling the doneFn passed to the callback.\n * The callback can return a PersistencePromise if it performs async\n * operations but note that iteration will continue without waiting for them\n * to complete.\n * @returns A PersistencePromise that resolves once all PersistencePromises\n * returned by callbacks resolve.\n */\n iterate(\n callback: IterateCallback<KeyType, ValueType>\n ): PersistencePromise<void>;\n iterate(\n options: IterateOptions,\n callback: IterateCallback<KeyType, ValueType>\n ): PersistencePromise<void>;\n iterate(\n optionsOrCallback: IterateOptions | IterateCallback<KeyType, ValueType>,\n callback?: IterateCallback<KeyType, ValueType>\n ): PersistencePromise<void> {\n let options;\n if (!callback) {\n options = {};\n callback = optionsOrCallback as IterateCallback<KeyType, ValueType>;\n } else {\n options = optionsOrCallback as IterateOptions;\n }\n const cursor = this.cursor(options);\n return this.iterateCursor(cursor, callback);\n }\n\n /**\n * Iterates over a store, but waits for the given callback to complete for\n * each entry before iterating the next entry. 
This allows the callback to do\n * asynchronous work to determine if this iteration should continue.\n *\n * The provided callback should return `true` to continue iteration, and\n * `false` otherwise.\n */\n iterateSerial(\n callback: (k: KeyType, v: ValueType) => PersistencePromise<boolean>\n ): PersistencePromise<void> {\n const cursorRequest = this.cursor({});\n return new PersistencePromise((resolve, reject) => {\n cursorRequest.onerror = (event: Event) => {\n const error = checkForAndReportiOSError(\n (event.target as IDBRequest).error!\n );\n reject(error);\n };\n cursorRequest.onsuccess = (event: Event) => {\n const cursor: IDBCursorWithValue = (event.target as IDBRequest).result;\n if (!cursor) {\n resolve();\n return;\n }\n\n callback(cursor.primaryKey as KeyType, cursor.value).next(\n shouldContinue => {\n if (shouldContinue) {\n cursor.continue();\n } else {\n resolve();\n }\n }\n );\n };\n });\n }\n\n private iterateCursor(\n cursorRequest: IDBRequest,\n fn: IterateCallback<KeyType, ValueType>\n ): PersistencePromise<void> {\n const results: Array<PersistencePromise<void>> = [];\n return new PersistencePromise((resolve, reject) => {\n cursorRequest.onerror = (event: Event) => {\n reject((event.target as IDBRequest).error!);\n };\n cursorRequest.onsuccess = (event: Event) => {\n const cursor: IDBCursorWithValue = (event.target as IDBRequest).result;\n if (!cursor) {\n resolve();\n return;\n }\n const controller = new IterationController(cursor);\n const userResult = fn(\n cursor.primaryKey as KeyType,\n cursor.value,\n controller\n );\n if (userResult instanceof PersistencePromise) {\n const userPromise: PersistencePromise<void> = userResult.catch(\n err => {\n controller.done();\n return PersistencePromise.reject(err);\n }\n );\n results.push(userPromise);\n }\n if (controller.isDone) {\n resolve();\n } else if (controller.skipToKey === null) {\n cursor.continue();\n } else {\n cursor.continue(controller.skipToKey);\n }\n };\n }).next(() => {\n return PersistencePromise.waitFor(results);\n });\n }\n\n private options(\n indexOrRange?: string | IDBKeyRange,\n range?: IDBKeyRange\n ): IterateOptions {\n let indexName: string | undefined = undefined;\n if (indexOrRange !== undefined) {\n if (typeof indexOrRange === 'string') {\n indexName = indexOrRange;\n } else {\n debugAssert(\n range === undefined,\n '3rd argument must not be defined if 2nd is a range.'\n );\n range = indexOrRange;\n }\n }\n return { index: indexName, range };\n }\n\n private cursor(options: IterateOptions): IDBRequest {\n let direction: IDBCursorDirection = 'next';\n if (options.reverse) {\n direction = 'prev';\n }\n if (options.index) {\n const index = this.store.index(options.index);\n if (options.keysOnly) {\n return index.openKeyCursor(options.range, direction);\n } else {\n return index.openCursor(options.range, direction);\n }\n } else {\n return this.store.openCursor(options.range, direction);\n }\n }\n}\n\n/**\n * Wraps an IDBRequest in a PersistencePromise, using the onsuccess / onerror\n * handlers to resolve / reject the PersistencePromise as appropriate.\n */\nfunction wrapRequest<R>(request: IDBRequest): PersistencePromise<R> {\n return new PersistencePromise<R>((resolve, reject) => {\n request.onsuccess = (event: Event) => {\n const result = (event.target as IDBRequest).result;\n resolve(result);\n };\n\n request.onerror = (event: Event) => {\n const error = checkForAndReportiOSError(\n (event.target as IDBRequest).error!\n );\n reject(error);\n };\n });\n}\n\n// Guard so we only report the 
error once.\nlet reportedIOSError = false;\nfunction checkForAndReportiOSError(error: DOMException): Error {\n const iOSVersion = SimpleDb.getIOSVersion(getUA());\n if (iOSVersion >= 12.2 && iOSVersion < 13) {\n const IOS_ERROR =\n 'An internal error was encountered in the Indexed Database server';\n if (error.message.indexOf(IOS_ERROR) >= 0) {\n // Wrap error in a more descriptive one.\n const newError = new FirestoreError(\n 'internal',\n `IOS_INDEXEDDB_BUG1: IndexedDb has thrown '${IOS_ERROR}'. This is likely ` +\n `due to an unavoidable bug in iOS. See https://stackoverflow.com/q/56496296/110915 ` +\n `for details and a potential workaround.`\n );\n if (!reportedIOSError) {\n reportedIOSError = true;\n // Throw a global exception outside of this promise chain, for the user to\n // potentially catch.\n setTimeout(() => {\n throw newError;\n }, 0);\n }\n return newError;\n }\n }\n return error;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert, fail } from './assert';\nimport { Code, FirestoreError } from './error';\nimport { logDebug, logError } from './log';\nimport { Deferred } from './promise';\nimport { ExponentialBackoff } from '../remote/backoff';\nimport { PlatformSupport } from '../platform/platform';\nimport { isIndexedDbTransactionError } from '../local/simple_db';\n\nconst LOG_TAG = 'AsyncQueue';\n\n// Accept any return type from setTimeout().\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype TimerHandle = any;\n\n/**\n * Wellknown \"timer\" IDs used when scheduling delayed operations on the\n * AsyncQueue. These IDs can then be used from tests to check for the presence\n * of operations or to run them early.\n *\n * The string values are used when encoding these timer IDs in JSON spec tests.\n */\nexport const enum TimerId {\n /** All can be used with runDelayedOperationsEarly() to run all timers. */\n All = 'all',\n\n /**\n * The following 4 timers are used in persistent_stream.ts for the listen and\n * write streams. The \"Idle\" timer is used to close the stream due to\n * inactivity. 
The \"ConnectionBackoff\" timer is used to restart a stream once\n * the appropriate backoff delay has elapsed.\n */\n ListenStreamIdle = 'listen_stream_idle',\n ListenStreamConnectionBackoff = 'listen_stream_connection_backoff',\n WriteStreamIdle = 'write_stream_idle',\n WriteStreamConnectionBackoff = 'write_stream_connection_backoff',\n\n /**\n * A timer used in online_state_tracker.ts to transition from\n * OnlineState.Unknown to Offline after a set timeout, rather than waiting\n * indefinitely for success or failure.\n */\n OnlineStateTimeout = 'online_state_timeout',\n\n /**\n * A timer used to update the client metadata in IndexedDb, which is used\n * to determine the primary leaseholder.\n */\n ClientMetadataRefresh = 'client_metadata_refresh',\n\n /** A timer used to periodically attempt LRU Garbage collection */\n LruGarbageCollection = 'lru_garbage_collection',\n\n /**\n * A timer used to retry transactions. Since there can be multiple concurrent\n * transactions, multiple of these may be in the queue at a given time.\n */\n TransactionRetry = 'transaction_retry',\n\n /**\n * A timer used to retry operations scheduled via retryable AsyncQueue\n * operations.\n */\n AsyncQueueRetry = 'async_queue_retry'\n}\n\n/**\n * Represents an operation scheduled to be run in the future on an AsyncQueue.\n *\n * It is created via DelayedOperation.createAndSchedule().\n *\n * Supports cancellation (via cancel()) and early execution (via skipDelay()).\n *\n * Note: We implement `PromiseLike` instead of `Promise`, as the `Promise` type\n * in newer versions of TypeScript defines `finally`, which is not available in\n * IE.\n */\nexport class DelayedOperation<T extends unknown> implements PromiseLike<T> {\n // handle for use with clearTimeout(), or null if the operation has been\n // executed or canceled already.\n private timerHandle: TimerHandle | null;\n\n private readonly deferred = new Deferred<T>();\n\n private constructor(\n private readonly asyncQueue: AsyncQueue,\n readonly timerId: TimerId,\n readonly targetTimeMs: number,\n private readonly op: () => Promise<T>,\n private readonly removalCallback: (op: DelayedOperation<T>) => void\n ) {\n // It's normal for the deferred promise to be canceled (due to cancellation)\n // and so we attach a dummy catch callback to avoid\n // 'UnhandledPromiseRejectionWarning' log spam.\n this.deferred.promise.catch(err => {});\n }\n\n /**\n * Creates and returns a DelayedOperation that has been scheduled to be\n * executed on the provided asyncQueue after the provided delayMs.\n *\n * @param asyncQueue The queue to schedule the operation on.\n * @param id A Timer ID identifying the type of operation this is.\n * @param delayMs The delay (ms) before the operation should be scheduled.\n * @param op The operation to run.\n * @param removalCallback A callback to be called synchronously once the\n * operation is executed or canceled, notifying the AsyncQueue to remove it\n * from its delayedOperations list.\n * PORTING NOTE: This exists to prevent making removeDelayedOperation() and\n * the DelayedOperation class public.\n */\n static createAndSchedule<R extends unknown>(\n asyncQueue: AsyncQueue,\n timerId: TimerId,\n delayMs: number,\n op: () => Promise<R>,\n removalCallback: (op: DelayedOperation<R>) => void\n ): DelayedOperation<R> {\n const targetTime = Date.now() + delayMs;\n const delayedOp = new DelayedOperation(\n asyncQueue,\n timerId,\n targetTime,\n op,\n removalCallback\n );\n delayedOp.start(delayMs);\n return delayedOp;\n }\n\n /**\n * Starts the 
timer. This is called immediately after construction by\n * createAndSchedule().\n */\n private start(delayMs: number): void {\n this.timerHandle = setTimeout(() => this.handleDelayElapsed(), delayMs);\n }\n\n /**\n * Queues the operation to run immediately (if it hasn't already been run or\n * canceled).\n */\n skipDelay(): void {\n return this.handleDelayElapsed();\n }\n\n /**\n * Cancels the operation if it hasn't already been executed or canceled. The\n * promise will be rejected.\n *\n * As long as the operation has not yet been run, calling cancel() provides a\n * guarantee that the operation will not be run.\n */\n cancel(reason?: string): void {\n if (this.timerHandle !== null) {\n this.clearTimeout();\n this.deferred.reject(\n new FirestoreError(\n Code.CANCELLED,\n 'Operation cancelled' + (reason ? ': ' + reason : '')\n )\n );\n }\n }\n\n then = this.deferred.promise.then.bind(this.deferred.promise);\n\n private handleDelayElapsed(): void {\n this.asyncQueue.enqueueAndForget(() => {\n if (this.timerHandle !== null) {\n this.clearTimeout();\n return this.op().then(result => {\n return this.deferred.resolve(result);\n });\n } else {\n return Promise.resolve();\n }\n });\n }\n\n private clearTimeout(): void {\n if (this.timerHandle !== null) {\n this.removalCallback(this);\n clearTimeout(this.timerHandle);\n this.timerHandle = null;\n }\n }\n}\n\nexport class AsyncQueue {\n // The last promise in the queue.\n private tail: Promise<unknown> = Promise.resolve();\n\n // The last retryable operation. Retryable operation are run in order and\n // retried with backoff.\n private retryableTail: Promise<void> = Promise.resolve();\n\n // Is this AsyncQueue being shut down? Once it is set to true, it will not\n // be changed again.\n private _isShuttingDown: boolean = false;\n\n // Operations scheduled to be queued in the future. Operations are\n // automatically removed after they are run or canceled.\n private delayedOperations: Array<DelayedOperation<unknown>> = [];\n\n // visible for testing\n failure: Error | null = null;\n\n // Flag set while there's an outstanding AsyncQueue operation, used for\n // assertion sanity-checks.\n private operationInProgress = false;\n\n // List of TimerIds to fast-forward delays for.\n private timerIdsToSkip: TimerId[] = [];\n\n // Backoff timer used to schedule retries for retryable operations\n private backoff = new ExponentialBackoff(this, TimerId.AsyncQueueRetry);\n\n // Visibility handler that triggers an immediate retry of all retryable\n // operations. Meant to speed up recovery when we regain file system access\n // after page comes into foreground.\n private visibilityHandler = (): void => this.backoff.skipBackoff();\n\n constructor() {\n const window = PlatformSupport.getPlatform().window;\n if (window && typeof window.addEventListener === 'function') {\n window.addEventListener('visibilitychange', this.visibilityHandler);\n }\n }\n\n // Is this AsyncQueue being shut down? 
If true, this instance will not enqueue\n // any new operations, Promises from enqueue requests will not resolve.\n get isShuttingDown(): boolean {\n return this._isShuttingDown;\n }\n\n /**\n * Adds a new operation to the queue without waiting for it to complete (i.e.\n * we ignore the Promise result).\n */\n enqueueAndForget<T extends unknown>(op: () => Promise<T>): void {\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.enqueue(op);\n }\n\n /**\n * Regardless if the queue has initialized shutdown, adds a new operation to the\n * queue without waiting for it to complete (i.e. we ignore the Promise result).\n */\n enqueueAndForgetEvenAfterShutdown<T extends unknown>(\n op: () => Promise<T>\n ): void {\n this.verifyNotFailed();\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.enqueueInternal(op);\n }\n\n /**\n * Regardless if the queue has initialized shutdown, adds a new operation to the\n * queue.\n */\n private enqueueEvenAfterShutdown<T extends unknown>(\n op: () => Promise<T>\n ): Promise<T> {\n this.verifyNotFailed();\n return this.enqueueInternal(op);\n }\n\n /**\n * Adds a new operation to the queue and initialize the shut down of this queue.\n * Returns a promise that will be resolved when the promise returned by the new\n * operation is (with its value).\n * Once this method is called, the only possible way to request running an operation\n * is through `enqueueAndForgetEvenAfterShutdown`.\n */\n async enqueueAndInitiateShutdown(op: () => Promise<void>): Promise<void> {\n this.verifyNotFailed();\n if (!this._isShuttingDown) {\n this._isShuttingDown = true;\n const window = PlatformSupport.getPlatform().window;\n if (window) {\n window.removeEventListener('visibilitychange', this.visibilityHandler);\n }\n await this.enqueueEvenAfterShutdown(op);\n }\n }\n\n /**\n * Adds a new operation to the queue. Returns a promise that will be resolved\n * when the promise returned by the new operation is (with its value).\n */\n enqueue<T extends unknown>(op: () => Promise<T>): Promise<T> {\n this.verifyNotFailed();\n if (this._isShuttingDown) {\n // Return a Promise which never resolves.\n return new Promise<T>(resolve => {});\n }\n return this.enqueueInternal(op);\n }\n\n /**\n * Enqueue a retryable operation.\n *\n * A retryable operation is rescheduled with backoff if it fails with a\n * IndexedDbTransactionError (the error type used by SimpleDb). 
All\n * retryable operations are executed in order and only run if all prior\n * operations were retried successfully.\n */\n enqueueRetryable(op: () => Promise<void>): void {\n this.verifyNotFailed();\n\n if (this._isShuttingDown) {\n return;\n }\n\n this.retryableTail = this.retryableTail.then(() => {\n const deferred = new Deferred<void>();\n const retryingOp = async (): Promise<void> => {\n try {\n await op();\n deferred.resolve();\n this.backoff.reset();\n } catch (e) {\n if (isIndexedDbTransactionError(e)) {\n logDebug(LOG_TAG, 'Operation failed with retryable error: ' + e);\n this.backoff.backoffAndRun(retryingOp);\n } else {\n deferred.resolve();\n throw e; // Failure will be handled by AsyncQueue\n }\n }\n };\n this.enqueueAndForget(retryingOp);\n return deferred.promise;\n });\n }\n\n private enqueueInternal<T extends unknown>(op: () => Promise<T>): Promise<T> {\n const newTail = this.tail.then(() => {\n this.operationInProgress = true;\n return op()\n .catch((error: FirestoreError) => {\n this.failure = error;\n this.operationInProgress = false;\n const message = error.stack || error.message || '';\n logError('INTERNAL UNHANDLED ERROR: ', message);\n\n // Re-throw the error so that this.tail becomes a rejected Promise and\n // all further attempts to chain (via .then) will just short-circuit\n // and return the rejected Promise.\n throw error;\n })\n .then(result => {\n this.operationInProgress = false;\n return result;\n });\n });\n this.tail = newTail;\n return newTail;\n }\n\n /**\n * Schedules an operation to be queued on the AsyncQueue once the specified\n * `delayMs` has elapsed. The returned DelayedOperation can be used to cancel\n * or fast-forward the operation prior to its running.\n */\n enqueueAfterDelay<T extends unknown>(\n timerId: TimerId,\n delayMs: number,\n op: () => Promise<T>\n ): DelayedOperation<T> {\n this.verifyNotFailed();\n\n debugAssert(\n delayMs >= 0,\n `Attempted to schedule an operation with a negative delay of ${delayMs}`\n );\n\n // Fast-forward delays for timerIds that have been overriden.\n if (this.timerIdsToSkip.indexOf(timerId) > -1) {\n delayMs = 0;\n }\n\n const delayedOp = DelayedOperation.createAndSchedule<T>(\n this,\n timerId,\n delayMs,\n op,\n removedOp =>\n this.removeDelayedOperation(removedOp as DelayedOperation<unknown>)\n );\n this.delayedOperations.push(delayedOp as DelayedOperation<unknown>);\n return delayedOp;\n }\n\n private verifyNotFailed(): void {\n if (this.failure) {\n fail(\n 'AsyncQueue is already failed: ' +\n (this.failure.stack || this.failure.message)\n );\n }\n }\n\n /**\n * Verifies there's an operation currently in-progress on the AsyncQueue.\n * Unfortunately we can't verify that the running code is in the promise chain\n * of that operation, so this isn't a foolproof check, but it should be enough\n * to catch some bugs.\n */\n verifyOperationInProgress(): void {\n debugAssert(\n this.operationInProgress,\n 'verifyOpInProgress() called when no op in progress on this queue.'\n );\n }\n\n /**\n * Waits until all currently queued tasks are finished executing. Delayed\n * operations are not run.\n */\n async drain(): Promise<void> {\n // Operations in the queue prior to draining may have enqueued additional\n // operations. 
Keep draining the queue until the tail is no longer advanced,\n // which indicates that no more new operations were enqueued and that all\n // operations were executed.\n let currentTail: Promise<unknown>;\n do {\n currentTail = this.tail;\n await currentTail;\n } while (currentTail !== this.tail);\n }\n\n /**\n * For Tests: Determine if a delayed operation with a particular TimerId\n * exists.\n */\n containsDelayedOperation(timerId: TimerId): boolean {\n for (const op of this.delayedOperations) {\n if (op.timerId === timerId) {\n return true;\n }\n }\n return false;\n }\n\n /**\n * For Tests: Runs some or all delayed operations early.\n *\n * @param lastTimerId Delayed operations up to and including this TimerId will\n * be drained. Pass TimerId.All to run all delayed operations.\n * @returns a Promise that resolves once all operations have been run.\n */\n runAllDelayedOperationsUntil(lastTimerId: TimerId): Promise<void> {\n // Note that draining may generate more delayed ops, so we do that first.\n return this.drain().then(() => {\n // Run ops in the same order they'd run if they ran naturally.\n this.delayedOperations.sort((a, b) => a.targetTimeMs - b.targetTimeMs);\n\n for (const op of this.delayedOperations) {\n op.skipDelay();\n if (lastTimerId !== TimerId.All && op.timerId === lastTimerId) {\n break;\n }\n }\n\n return this.drain();\n });\n }\n\n /**\n * For Tests: Skip all subsequent delays for a timer id.\n */\n skipDelaysForTimerId(timerId: TimerId): void {\n this.timerIdsToSkip.push(timerId);\n }\n\n /** Called once a DelayedOperation is run or canceled. */\n private removeDelayedOperation(op: DelayedOperation<unknown>): void {\n // NOTE: indexOf / slice are O(n), but delayedOperations is expected to be small.\n const index = this.delayedOperations.indexOf(op);\n debugAssert(index >= 0, 'Delayed operation not found.');\n this.delayedOperations.splice(index, 1);\n }\n}\n\n/**\n * Returns a FirestoreError that can be surfaced to the user if the provided\n * error is an IndexedDbTransactionError. 
Re-throws the error otherwise.\n */\nexport function wrapInUserErrorIfRecoverable(\n e: Error,\n msg: string\n): FirestoreError {\n logError(LOG_TAG, `${msg}: ${e}`);\n if (isIndexedDbTransactionError(e)) {\n return new FirestoreError(Code.UNAVAILABLE, `${msg}: ${e}`);\n } else {\n throw e;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ListenSequence } from '../core/listen_sequence';\nimport { ListenSequenceNumber, TargetId } from '../core/types';\nimport { debugAssert } from '../util/assert';\nimport { AsyncQueue, DelayedOperation, TimerId } from '../util/async_queue';\nimport { getLogLevel, logDebug, LogLevel } from '../util/log';\nimport { primitiveComparator } from '../util/misc';\nimport { SortedMap } from '../util/sorted_map';\nimport { SortedSet } from '../util/sorted_set';\nimport { ignoreIfPrimaryLeaseLoss, LocalStore } from './local_store';\nimport {\n GarbageCollectionScheduler,\n PersistenceTransaction\n} from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { TargetData } from './target_data';\nimport { isIndexedDbTransactionError } from './simple_db';\n\nconst LOG_TAG = 'LruGarbageCollector';\n\n/**\n * Persistence layers intending to use LRU Garbage collection should have reference delegates that\n * implement this interface. This interface defines the operations that the LRU garbage collector\n * needs from the persistence layer.\n */\nexport interface LruDelegate {\n readonly garbageCollector: LruGarbageCollector;\n\n /** Enumerates all the targets in the TargetCache. */\n forEachTarget(\n txn: PersistenceTransaction,\n f: (target: TargetData) => void\n ): PersistencePromise<void>;\n\n getSequenceNumberCount(\n txn: PersistenceTransaction\n ): PersistencePromise<number>;\n\n /**\n * Enumerates sequence numbers for documents not associated with a target.\n * Note that this may include duplicate sequence numbers.\n */\n forEachOrphanedDocumentSequenceNumber(\n txn: PersistenceTransaction,\n f: (sequenceNumber: ListenSequenceNumber) => void\n ): PersistencePromise<void>;\n\n /**\n * Removes all targets that have a sequence number less than or equal to `upperBound`, and are not\n * present in the `activeTargetIds` set.\n *\n * @return the number of targets removed.\n */\n removeTargets(\n txn: PersistenceTransaction,\n upperBound: ListenSequenceNumber,\n activeTargetIds: ActiveTargets\n ): PersistencePromise<number>;\n\n /**\n * Removes all unreferenced documents from the cache that have a sequence number less than or\n * equal to the given `upperBound`.\n *\n * @return the number of documents removed.\n */\n removeOrphanedDocuments(\n txn: PersistenceTransaction,\n upperBound: ListenSequenceNumber\n ): PersistencePromise<number>;\n\n getCacheSize(txn: PersistenceTransaction): PersistencePromise<number>;\n}\n\n/**\n * Describes a map whose keys are active target ids. 
We do not care about the type of the\n * values.\n */\nexport type ActiveTargets = SortedMap<TargetId, unknown>;\n\n// The type and comparator for the items contained in the SortedSet used in\n// place of a priority queue for the RollingSequenceNumberBuffer.\ntype BufferEntry = [ListenSequenceNumber, number];\nfunction bufferEntryComparator(\n [aSequence, aIndex]: BufferEntry,\n [bSequence, bIndex]: BufferEntry\n): number {\n const seqCmp = primitiveComparator(aSequence, bSequence);\n if (seqCmp === 0) {\n // This order doesn't matter, but we can bias against churn by sorting\n // entries created earlier as less than newer entries.\n return primitiveComparator(aIndex, bIndex);\n } else {\n return seqCmp;\n }\n}\n\n/**\n * Used to calculate the nth sequence number. Keeps a rolling buffer of the\n * lowest n values passed to `addElement`, and finally reports the largest of\n * them in `maxValue`.\n */\nclass RollingSequenceNumberBuffer {\n private buffer: SortedSet<BufferEntry> = new SortedSet<BufferEntry>(\n bufferEntryComparator\n );\n\n private previousIndex = 0;\n\n constructor(private readonly maxElements: number) {}\n\n private nextIndex(): number {\n return ++this.previousIndex;\n }\n\n addElement(sequenceNumber: ListenSequenceNumber): void {\n const entry: BufferEntry = [sequenceNumber, this.nextIndex()];\n if (this.buffer.size < this.maxElements) {\n this.buffer = this.buffer.add(entry);\n } else {\n const highestValue = this.buffer.last()!;\n if (bufferEntryComparator(entry, highestValue) < 0) {\n this.buffer = this.buffer.delete(highestValue).add(entry);\n }\n }\n }\n\n get maxValue(): ListenSequenceNumber {\n // Guaranteed to be non-empty. If we decide we are not collecting any\n // sequence numbers, nthSequenceNumber below short-circuits. If we have\n // decided that we are collecting n sequence numbers, it's because n is some\n // percentage of the existing sequence numbers. That means we should never\n // be in a situation where we are collecting sequence numbers but don't\n // actually have any.\n return this.buffer.last()![0];\n }\n}\n\n/**\n * Describes the results of a garbage collection run. `didRun` will be set to\n * `false` if collection was skipped (either it is disabled or the cache size\n * has not hit the threshold). 
If collection ran, the other fields will be\n * filled in with the details of the results.\n */\nexport interface LruResults {\n readonly didRun: boolean;\n readonly sequenceNumbersCollected: number;\n readonly targetsRemoved: number;\n readonly documentsRemoved: number;\n}\n\nconst GC_DID_NOT_RUN: LruResults = {\n didRun: false,\n sequenceNumbersCollected: 0,\n targetsRemoved: 0,\n documentsRemoved: 0\n};\n\nexport class LruParams {\n static readonly COLLECTION_DISABLED = -1;\n static readonly MINIMUM_CACHE_SIZE_BYTES = 1 * 1024 * 1024;\n static readonly DEFAULT_CACHE_SIZE_BYTES = 40 * 1024 * 1024;\n private static readonly DEFAULT_COLLECTION_PERCENTILE = 10;\n private static readonly DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT = 1000;\n\n static withCacheSize(cacheSize: number): LruParams {\n return new LruParams(\n cacheSize,\n LruParams.DEFAULT_COLLECTION_PERCENTILE,\n LruParams.DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT\n );\n }\n\n static readonly DEFAULT: LruParams = new LruParams(\n LruParams.DEFAULT_CACHE_SIZE_BYTES,\n LruParams.DEFAULT_COLLECTION_PERCENTILE,\n LruParams.DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT\n );\n\n static readonly DISABLED: LruParams = new LruParams(\n LruParams.COLLECTION_DISABLED,\n 0,\n 0\n );\n\n constructor(\n // When we attempt to collect, we will only do so if the cache size is greater than this\n // threshold. Passing `COLLECTION_DISABLED` here will cause collection to always be skipped.\n readonly cacheSizeCollectionThreshold: number,\n // The percentage of sequence numbers that we will attempt to collect\n readonly percentileToCollect: number,\n // A cap on the total number of sequence numbers that will be collected. This prevents\n // us from collecting a huge number of sequence numbers if the cache has grown very large.\n readonly maximumSequenceNumbersToCollect: number\n ) {}\n}\n\n/** How long we wait to try running LRU GC after SDK initialization. */\nconst INITIAL_GC_DELAY_MS = 1 * 60 * 1000;\n/** Minimum amount of time between GC checks, after the first one. */\nconst REGULAR_GC_DELAY_MS = 5 * 60 * 1000;\n\n/**\n * This class is responsible for the scheduling of LRU garbage collection. It handles checking\n * whether or not GC is enabled, as well as which delay to use before the next run.\n */\nexport class LruScheduler implements GarbageCollectionScheduler {\n private hasRun: boolean = false;\n private gcTask: DelayedOperation<void> | null;\n\n constructor(\n private readonly garbageCollector: LruGarbageCollector,\n private readonly asyncQueue: AsyncQueue\n ) {\n this.gcTask = null;\n }\n\n start(localStore: LocalStore): void {\n debugAssert(\n this.gcTask === null,\n 'Cannot start an already started LruScheduler'\n );\n if (\n this.garbageCollector.params.cacheSizeCollectionThreshold !==\n LruParams.COLLECTION_DISABLED\n ) {\n this.scheduleGC(localStore);\n }\n }\n\n stop(): void {\n if (this.gcTask) {\n this.gcTask.cancel();\n this.gcTask = null;\n }\n }\n\n get started(): boolean {\n return this.gcTask !== null;\n }\n\n private scheduleGC(localStore: LocalStore): void {\n debugAssert(\n this.gcTask === null,\n 'Cannot schedule GC while a task is pending'\n );\n const delay = this.hasRun ? 
REGULAR_GC_DELAY_MS : INITIAL_GC_DELAY_MS;\n logDebug(\n 'LruGarbageCollector',\n `Garbage collection scheduled in ${delay}ms`\n );\n this.gcTask = this.asyncQueue.enqueueAfterDelay(\n TimerId.LruGarbageCollection,\n delay,\n async () => {\n this.gcTask = null;\n this.hasRun = true;\n try {\n await localStore.collectGarbage(this.garbageCollector);\n } catch (e) {\n if (isIndexedDbTransactionError(e)) {\n logDebug(\n LOG_TAG,\n 'Ignoring IndexedDB error during garbage collection: ',\n e\n );\n } else {\n await ignoreIfPrimaryLeaseLoss(e);\n }\n }\n await this.scheduleGC(localStore);\n }\n );\n }\n}\n\n/** Implements the steps for LRU garbage collection. */\nexport class LruGarbageCollector {\n constructor(\n private readonly delegate: LruDelegate,\n readonly params: LruParams\n ) {}\n\n /** Given a percentile of target to collect, returns the number of targets to collect. */\n calculateTargetCount(\n txn: PersistenceTransaction,\n percentile: number\n ): PersistencePromise<number> {\n return this.delegate.getSequenceNumberCount(txn).next(targetCount => {\n return Math.floor((percentile / 100.0) * targetCount);\n });\n }\n\n /** Returns the nth sequence number, counting in order from the smallest. */\n nthSequenceNumber(\n txn: PersistenceTransaction,\n n: number\n ): PersistencePromise<ListenSequenceNumber> {\n if (n === 0) {\n return PersistencePromise.resolve(ListenSequence.INVALID);\n }\n\n const buffer = new RollingSequenceNumberBuffer(n);\n return this.delegate\n .forEachTarget(txn, target => buffer.addElement(target.sequenceNumber))\n .next(() => {\n return this.delegate.forEachOrphanedDocumentSequenceNumber(\n txn,\n sequenceNumber => buffer.addElement(sequenceNumber)\n );\n })\n .next(() => buffer.maxValue);\n }\n\n /**\n * Removes targets with a sequence number equal to or less than the given upper bound, and removes\n * document associations with those targets.\n */\n removeTargets(\n txn: PersistenceTransaction,\n upperBound: ListenSequenceNumber,\n activeTargetIds: ActiveTargets\n ): PersistencePromise<number> {\n return this.delegate.removeTargets(txn, upperBound, activeTargetIds);\n }\n\n /**\n * Removes documents that have a sequence number equal to or less than the upper bound and are not\n * otherwise pinned.\n */\n removeOrphanedDocuments(\n txn: PersistenceTransaction,\n upperBound: ListenSequenceNumber\n ): PersistencePromise<number> {\n return this.delegate.removeOrphanedDocuments(txn, upperBound);\n }\n\n collect(\n txn: PersistenceTransaction,\n activeTargetIds: ActiveTargets\n ): PersistencePromise<LruResults> {\n if (\n this.params.cacheSizeCollectionThreshold === LruParams.COLLECTION_DISABLED\n ) {\n logDebug('LruGarbageCollector', 'Garbage collection skipped; disabled');\n return PersistencePromise.resolve(GC_DID_NOT_RUN);\n }\n\n return this.getCacheSize(txn).next(cacheSize => {\n if (cacheSize < this.params.cacheSizeCollectionThreshold) {\n logDebug(\n 'LruGarbageCollector',\n `Garbage collection skipped; Cache size ${cacheSize} ` +\n `is lower than threshold ${this.params.cacheSizeCollectionThreshold}`\n );\n return GC_DID_NOT_RUN;\n } else {\n return this.runGarbageCollection(txn, activeTargetIds);\n }\n });\n }\n\n getCacheSize(txn: PersistenceTransaction): PersistencePromise<number> {\n return this.delegate.getCacheSize(txn);\n }\n\n private runGarbageCollection(\n txn: PersistenceTransaction,\n activeTargetIds: ActiveTargets\n ): PersistencePromise<LruResults> {\n let upperBoundSequenceNumber: number;\n let sequenceNumbersToCollect: number, targetsRemoved: 
number;\n // Timestamps for various pieces of the process\n let countedTargetsTs: number,\n foundUpperBoundTs: number,\n removedTargetsTs: number,\n removedDocumentsTs: number;\n const startTs = Date.now();\n return this.calculateTargetCount(txn, this.params.percentileToCollect)\n .next(sequenceNumbers => {\n // Cap at the configured max\n if (sequenceNumbers > this.params.maximumSequenceNumbersToCollect) {\n logDebug(\n 'LruGarbageCollector',\n 'Capping sequence numbers to collect down ' +\n `to the maximum of ${this.params.maximumSequenceNumbersToCollect} ` +\n `from ${sequenceNumbers}`\n );\n sequenceNumbersToCollect = this.params\n .maximumSequenceNumbersToCollect;\n } else {\n sequenceNumbersToCollect = sequenceNumbers;\n }\n countedTargetsTs = Date.now();\n\n return this.nthSequenceNumber(txn, sequenceNumbersToCollect);\n })\n .next(upperBound => {\n upperBoundSequenceNumber = upperBound;\n foundUpperBoundTs = Date.now();\n\n return this.removeTargets(\n txn,\n upperBoundSequenceNumber,\n activeTargetIds\n );\n })\n .next(numTargetsRemoved => {\n targetsRemoved = numTargetsRemoved;\n removedTargetsTs = Date.now();\n\n return this.removeOrphanedDocuments(txn, upperBoundSequenceNumber);\n })\n .next(documentsRemoved => {\n removedDocumentsTs = Date.now();\n\n if (getLogLevel() <= LogLevel.DEBUG) {\n const desc =\n 'LRU Garbage Collection\\n' +\n `\\tCounted targets in ${countedTargetsTs - startTs}ms\\n` +\n `\\tDetermined least recently used ${sequenceNumbersToCollect} in ` +\n `${foundUpperBoundTs - countedTargetsTs}ms\\n` +\n `\\tRemoved ${targetsRemoved} targets in ` +\n `${removedTargetsTs - foundUpperBoundTs}ms\\n` +\n `\\tRemoved ${documentsRemoved} documents in ` +\n `${removedDocumentsTs - removedTargetsTs}ms\\n` +\n `Total Duration: ${removedDocumentsTs - startTs}ms`;\n logDebug('LruGarbageCollector', desc);\n }\n\n return PersistencePromise.resolve<LruResults>({\n didRun: true,\n sequenceNumbersCollected: sequenceNumbersToCollect,\n targetsRemoved,\n documentsRemoved\n });\n });\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Timestamp } from '../api/timestamp';\nimport { User } from '../auth/user';\nimport { Query } from '../core/query';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { Target } from '../core/target';\nimport { BatchId, TargetId } from '../core/types';\nimport {\n DocumentKeySet,\n documentKeySet,\n DocumentMap,\n maybeDocumentMap,\n MaybeDocumentMap\n} from '../model/collections';\nimport { MaybeDocument, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { Mutation, PatchMutation, Precondition } from '../model/mutation';\nimport {\n BATCHID_UNKNOWN,\n MutationBatch,\n MutationBatchResult\n} from '../model/mutation_batch';\nimport { RemoteEvent, TargetChange } from '../remote/remote_event';\nimport { hardAssert, debugAssert } from '../util/assert';\nimport { Code, FirestoreError } from 
'../util/error';\nimport { logDebug } from '../util/log';\nimport { primitiveComparator } from '../util/misc';\nimport { ObjectMap } from '../util/obj_map';\nimport { SortedMap } from '../util/sorted_map';\n\nimport { LocalDocumentsView } from './local_documents_view';\nimport { LocalViewChanges } from './local_view_changes';\nimport { LruGarbageCollector, LruResults } from './lru_garbage_collector';\nimport { MutationQueue } from './mutation_queue';\nimport {\n Persistence,\n PersistenceTransaction,\n PRIMARY_LEASE_LOST_ERROR_MSG\n} from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { TargetCache } from './target_cache';\nimport { QueryEngine } from './query_engine';\nimport { RemoteDocumentCache } from './remote_document_cache';\nimport { RemoteDocumentChangeBuffer } from './remote_document_change_buffer';\nimport { ClientId } from './shared_client_state';\nimport { TargetData, TargetPurpose } from './target_data';\nimport { ByteString } from '../util/byte_string';\nimport { IndexedDbPersistence } from './indexeddb_persistence';\nimport { IndexedDbMutationQueue } from './indexeddb_mutation_queue';\nimport { IndexedDbRemoteDocumentCache } from './indexeddb_remote_document_cache';\nimport { IndexedDbTargetCache } from './indexeddb_target_cache';\nimport { extractFieldMask } from '../model/object_value';\nimport { isIndexedDbTransactionError } from './simple_db';\n\nconst LOG_TAG = 'LocalStore';\n\n/** The result of a write to the local store. */\nexport interface LocalWriteResult {\n batchId: BatchId;\n changes: MaybeDocumentMap;\n}\n\n/** The result of a user-change operation in the local store. */\nexport interface UserChangeResult {\n readonly affectedDocuments: MaybeDocumentMap;\n readonly removedBatchIds: BatchId[];\n readonly addedBatchIds: BatchId[];\n}\n\n/** The result of executing a query against the local store. */\nexport interface QueryResult {\n readonly documents: DocumentMap;\n readonly remoteKeys: DocumentKeySet;\n}\n\n/**\n * Local storage in the Firestore client. Coordinates persistence components\n * like the mutation queue and remote document cache to present a\n * latency-compensated view of stored data.\n *\n * The LocalStore is responsible for accepting mutations from the Sync Engine.\n * Writes from the client are put into a queue as provisional Mutations until\n * they are processed by the RemoteStore and confirmed as having been written\n * to the server.\n *\n * The local store provides the local version of documents that have been\n * modified locally. It maintains the constraint:\n *\n * LocalDocument = RemoteDocument + Active(LocalMutations)\n *\n * (Active mutations are those that are enqueued and have not been previously\n * acknowledged or rejected).\n *\n * The RemoteDocument (\"ground truth\") state is provided via the\n * applyChangeBatch method. 
It will be some version of a server-provided\n * document OR will be a server-provided document PLUS acknowledged mutations:\n *\n * RemoteDocument' = RemoteDocument + Acknowledged(LocalMutations)\n *\n * Note that this \"dirty\" version of a RemoteDocument will not be identical to a\n * server base version, since it has LocalMutations added to it pending getting\n * an authoritative copy from the server.\n *\n * Since LocalMutations can be rejected by the server, we have to be able to\n * revert a LocalMutation that has already been applied to the LocalDocument\n * (typically done by replaying all remaining LocalMutations to the\n * RemoteDocument to re-apply).\n *\n * The LocalStore is responsible for the garbage collection of the documents it\n * contains. For now, it pins every doc referenced by a view, the mutation queue, or\n * the RemoteStore.\n *\n * It also maintains the persistence of mapping queries to resume tokens and\n * target ids. It needs to know this data about queries to properly know what\n * docs it would be allowed to garbage collect.\n *\n * The LocalStore must be able to efficiently execute queries against its local\n * cache of the documents, to provide the initial set of results before any\n * remote changes have been received.\n *\n * Note: In TypeScript, most methods return Promises since the implementation\n * may rely on fetching data from IndexedDB which is async.\n * These Promises will only be rejected on an I/O error or other internal\n * (unexpected) failure (e.g. failed assert) and always represent an\n * unrecoverable error (should be caught / reported by the async_queue).\n */\nexport class LocalStore {\n /**\n * The maximum time to leave a resume token buffered without writing it out.\n * This value is arbitrary: it's long enough to avoid several writes\n * (possibly indefinitely if updates come more frequently than this) but\n * short enough that restarting after crashing will still have a pretty\n * recent resume token.\n */\n private static readonly RESUME_TOKEN_MAX_AGE_MICROS = 5 * 60 * 1e6;\n\n /**\n * The set of all mutations that have been sent but not yet been applied to\n * the backend.\n */\n protected mutationQueue: MutationQueue;\n\n /** The set of all cached remote documents. */\n protected remoteDocuments: RemoteDocumentCache;\n\n /**\n * The \"local\" view of all documents (layering mutationQueue on top of\n * remoteDocumentCache).\n */\n protected localDocuments: LocalDocumentsView;\n\n /** Maps a target to its `TargetData`. */\n protected targetCache: TargetCache;\n\n /**\n * Maps a targetID to data about its target.\n *\n * PORTING NOTE: We are using an immutable data structure on Web to make re-runs\n * of `applyRemoteEvent()` idempotent.\n */\n protected targetDataByTarget = new SortedMap<TargetId, TargetData>(\n primitiveComparator\n );\n\n /** Maps a target to its targetID. */\n // TODO(wuandy): Evaluate if TargetId can be part of Target.\n private targetIdByTarget = new ObjectMap<Target, TargetId>(t =>\n t.canonicalId()\n );\n\n /**\n * The read time of the last entry processed by `getNewDocumentChanges()`.\n *\n * PORTING NOTE: This is only used for multi-tab synchronization.\n */\n protected lastDocumentChangeReadTime = SnapshotVersion.min();\n\n constructor(\n /** Manages our in-memory or durable persistence. 
*/\n protected persistence: Persistence,\n private queryEngine: QueryEngine,\n initialUser: User\n ) {\n debugAssert(\n persistence.started,\n 'LocalStore was passed an unstarted persistence implementation'\n );\n this.mutationQueue = persistence.getMutationQueue(initialUser);\n this.remoteDocuments = persistence.getRemoteDocumentCache();\n this.targetCache = persistence.getTargetCache();\n this.localDocuments = new LocalDocumentsView(\n this.remoteDocuments,\n this.mutationQueue,\n this.persistence.getIndexManager()\n );\n this.queryEngine.setLocalDocumentsView(this.localDocuments);\n }\n\n /** Starts the LocalStore. */\n start(): Promise<void> {\n return Promise.resolve();\n }\n\n /**\n * Tells the LocalStore that the currently authenticated user has changed.\n *\n * In response the local store switches the mutation queue to the new user and\n * returns any resulting document changes.\n */\n // PORTING NOTE: Android and iOS only return the documents affected by the\n // change.\n async handleUserChange(user: User): Promise<UserChangeResult> {\n let newMutationQueue = this.mutationQueue;\n let newLocalDocuments = this.localDocuments;\n\n const result = await this.persistence.runTransaction(\n 'Handle user change',\n 'readonly',\n txn => {\n // Swap out the mutation queue, grabbing the pending mutation batches\n // before and after.\n let oldBatches: MutationBatch[];\n return this.mutationQueue\n .getAllMutationBatches(txn)\n .next(promisedOldBatches => {\n oldBatches = promisedOldBatches;\n\n newMutationQueue = this.persistence.getMutationQueue(user);\n\n // Recreate our LocalDocumentsView using the new\n // MutationQueue.\n newLocalDocuments = new LocalDocumentsView(\n this.remoteDocuments,\n newMutationQueue,\n this.persistence.getIndexManager()\n );\n return newMutationQueue.getAllMutationBatches(txn);\n })\n .next(newBatches => {\n const removedBatchIds: BatchId[] = [];\n const addedBatchIds: BatchId[] = [];\n\n // Union the old/new changed keys.\n let changedKeys = documentKeySet();\n\n for (const batch of oldBatches) {\n removedBatchIds.push(batch.batchId);\n for (const mutation of batch.mutations) {\n changedKeys = changedKeys.add(mutation.key);\n }\n }\n\n for (const batch of newBatches) {\n addedBatchIds.push(batch.batchId);\n for (const mutation of batch.mutations) {\n changedKeys = changedKeys.add(mutation.key);\n }\n }\n\n // Return the set of all (potentially) changed documents and the list\n // of mutation batch IDs that were affected by change.\n return newLocalDocuments\n .getDocuments(txn, changedKeys)\n .next(affectedDocuments => {\n return {\n affectedDocuments,\n removedBatchIds,\n addedBatchIds\n };\n });\n });\n }\n );\n\n this.mutationQueue = newMutationQueue;\n this.localDocuments = newLocalDocuments;\n this.queryEngine.setLocalDocumentsView(this.localDocuments);\n\n return result;\n }\n\n /* Accept locally generated Mutations and commit them to storage. */\n localWrite(mutations: Mutation[]): Promise<LocalWriteResult> {\n const localWriteTime = Timestamp.now();\n const keys = mutations.reduce(\n (keys, m) => keys.add(m.key),\n documentKeySet()\n );\n\n let existingDocs: MaybeDocumentMap;\n\n return this.persistence\n .runTransaction('Locally write mutations', 'readwrite', txn => {\n // Load and apply all existing mutations. 
This lets us compute the\n // current base state for all non-idempotent transforms before applying\n // any additional user-provided writes.\n return this.localDocuments.getDocuments(txn, keys).next(docs => {\n existingDocs = docs;\n\n // For non-idempotent mutations (such as `FieldValue.increment()`),\n // we record the base state in a separate patch mutation. This is\n // later used to guarantee consistent values and prevents flicker\n // even if the backend sends us an update that already includes our\n // transform.\n const baseMutations: Mutation[] = [];\n\n for (const mutation of mutations) {\n const baseValue = mutation.extractBaseValue(\n existingDocs.get(mutation.key)\n );\n if (baseValue != null) {\n // NOTE: The base state should only be applied if there's some\n // existing document to override, so use a Precondition of\n // exists=true\n baseMutations.push(\n new PatchMutation(\n mutation.key,\n baseValue,\n extractFieldMask(baseValue.proto.mapValue!),\n Precondition.exists(true)\n )\n );\n }\n }\n\n return this.mutationQueue.addMutationBatch(\n txn,\n localWriteTime,\n baseMutations,\n mutations\n );\n });\n })\n .then(batch => {\n const changes = batch.applyToLocalDocumentSet(existingDocs);\n return { batchId: batch.batchId, changes };\n });\n }\n\n /**\n * Acknowledge the given batch.\n *\n * On the happy path when a batch is acknowledged, the local store will\n *\n * + remove the batch from the mutation queue;\n * + apply the changes to the remote document cache;\n * + recalculate the latency compensated view implied by those changes (there\n * may be mutations in the queue that affect the documents but haven't been\n * acknowledged yet); and\n * + give the changed documents back to the sync engine\n *\n * @returns The resulting (modified) documents.\n */\n acknowledgeBatch(\n batchResult: MutationBatchResult\n ): Promise<MaybeDocumentMap> {\n return this.persistence.runTransaction(\n 'Acknowledge batch',\n 'readwrite-primary',\n txn => {\n const affected = batchResult.batch.keys();\n const documentBuffer = this.remoteDocuments.newChangeBuffer({\n trackRemovals: true // Make sure document removals show up in `getNewDocumentChanges()`\n });\n return this.mutationQueue\n .acknowledgeBatch(txn, batchResult.batch, batchResult.streamToken)\n .next(() =>\n this.applyWriteToRemoteDocuments(txn, batchResult, documentBuffer)\n )\n .next(() => documentBuffer.apply(txn))\n .next(() => this.mutationQueue.performConsistencyCheck(txn))\n .next(() => this.localDocuments.getDocuments(txn, affected));\n }\n );\n }\n\n /**\n * Remove mutations from the MutationQueue for the specified batch;\n * LocalDocuments will be recalculated.\n *\n * @returns The resulting modified documents.\n */\n rejectBatch(batchId: BatchId): Promise<MaybeDocumentMap> {\n return this.persistence.runTransaction(\n 'Reject batch',\n 'readwrite-primary',\n txn => {\n let affectedKeys: DocumentKeySet;\n return this.mutationQueue\n .lookupMutationBatch(txn, batchId)\n .next((batch: MutationBatch | null) => {\n hardAssert(batch !== null, 'Attempt to reject nonexistent batch!');\n affectedKeys = batch.keys();\n return this.mutationQueue.removeMutationBatch(txn, batch);\n })\n .next(() => {\n return this.mutationQueue.performConsistencyCheck(txn);\n })\n .next(() => {\n return this.localDocuments.getDocuments(txn, affectedKeys);\n });\n }\n );\n }\n\n /**\n * Returns the largest (latest) batch id in mutation queue that is pending server response.\n * Returns `BATCHID_UNKNOWN` if the queue is empty.\n */\n 
getHighestUnacknowledgedBatchId(): Promise<BatchId> {\n return this.persistence.runTransaction(\n 'Get highest unacknowledged batch id',\n 'readonly',\n txn => {\n return this.mutationQueue.getHighestUnacknowledgedBatchId(txn);\n }\n );\n }\n\n /** Returns the last recorded stream token for the current user. */\n getLastStreamToken(): Promise<ByteString> {\n return this.persistence.runTransaction(\n 'Get last stream token',\n 'readonly',\n txn => {\n return this.mutationQueue.getLastStreamToken(txn);\n }\n );\n }\n\n /**\n * Sets the stream token for the current user without acknowledging any\n * mutation batch. This is usually only useful after a stream handshake or in\n * response to an error that requires clearing the stream token.\n */\n setLastStreamToken(streamToken: ByteString): Promise<void> {\n return this.persistence.runTransaction(\n 'Set last stream token',\n 'readwrite-primary',\n txn => {\n return this.mutationQueue.setLastStreamToken(txn, streamToken);\n }\n );\n }\n\n /**\n * Returns the last consistent snapshot processed (used by the RemoteStore to\n * determine whether to buffer incoming snapshots from the backend).\n */\n getLastRemoteSnapshotVersion(): Promise<SnapshotVersion> {\n return this.persistence.runTransaction(\n 'Get last remote snapshot version',\n 'readonly',\n txn => this.targetCache.getLastRemoteSnapshotVersion(txn)\n );\n }\n\n /**\n * Update the \"ground-state\" (remote) documents. We assume that the remote\n * event reflects any write batches that have been acknowledged or rejected\n * (i.e. we do not re-apply local mutations to updates from this event).\n *\n * LocalDocuments are re-calculated if there are remaining mutations in the\n * queue.\n */\n applyRemoteEvent(remoteEvent: RemoteEvent): Promise<MaybeDocumentMap> {\n const remoteVersion = remoteEvent.snapshotVersion;\n let newTargetDataByTargetMap = this.targetDataByTarget;\n\n return this.persistence\n .runTransaction('Apply remote event', 'readwrite-primary', txn => {\n const documentBuffer = this.remoteDocuments.newChangeBuffer({\n trackRemovals: true // Make sure document removals show up in `getNewDocumentChanges()`\n });\n\n // Reset newTargetDataByTargetMap in case this transaction gets re-run.\n newTargetDataByTargetMap = this.targetDataByTarget;\n\n const promises = [] as Array<PersistencePromise<void>>;\n remoteEvent.targetChanges.forEach((change, targetId) => {\n const oldTargetData = newTargetDataByTargetMap.get(targetId);\n if (!oldTargetData) {\n return;\n }\n\n // Only update the remote keys if the target is still active. 
This\n // ensures that we can persist the updated target data along with\n // the updated assignment.\n promises.push(\n this.targetCache\n .removeMatchingKeys(txn, change.removedDocuments, targetId)\n .next(() => {\n return this.targetCache.addMatchingKeys(\n txn,\n change.addedDocuments,\n targetId\n );\n })\n );\n\n const resumeToken = change.resumeToken;\n // Update the resume token if the change includes one.\n if (resumeToken.approximateByteSize() > 0) {\n const newTargetData = oldTargetData\n .withResumeToken(resumeToken, remoteVersion)\n .withSequenceNumber(txn.currentSequenceNumber);\n newTargetDataByTargetMap = newTargetDataByTargetMap.insert(\n targetId,\n newTargetData\n );\n\n // Update the target data if there are target changes (or if\n // sufficient time has passed since the last update).\n if (\n LocalStore.shouldPersistTargetData(\n oldTargetData,\n newTargetData,\n change\n )\n ) {\n promises.push(\n this.targetCache.updateTargetData(txn, newTargetData)\n );\n }\n }\n });\n\n let changedDocs = maybeDocumentMap();\n let updatedKeys = documentKeySet();\n remoteEvent.documentUpdates.forEach((key, doc) => {\n updatedKeys = updatedKeys.add(key);\n });\n\n // Each loop iteration only affects its \"own\" doc, so it's safe to get all the remote\n // documents in advance in a single call.\n promises.push(\n documentBuffer.getEntries(txn, updatedKeys).next(existingDocs => {\n remoteEvent.documentUpdates.forEach((key, doc) => {\n const existingDoc = existingDocs.get(key);\n\n // Note: The order of the steps below is important, since we want\n // to ensure that rejected limbo resolutions (which fabricate\n // NoDocuments with SnapshotVersion.min()) never add documents to\n // cache.\n if (\n doc instanceof NoDocument &&\n doc.version.isEqual(SnapshotVersion.min())\n ) {\n // NoDocuments with SnapshotVersion.min() are used in manufactured\n // events. We remove these documents from cache since we lost\n // access.\n documentBuffer.removeEntry(key, remoteVersion);\n changedDocs = changedDocs.insert(key, doc);\n } else if (\n existingDoc == null ||\n doc.version.compareTo(existingDoc.version) > 0 ||\n (doc.version.compareTo(existingDoc.version) === 0 &&\n existingDoc.hasPendingWrites)\n ) {\n debugAssert(\n !SnapshotVersion.min().isEqual(remoteVersion),\n 'Cannot add a document when the remote version is zero'\n );\n documentBuffer.addEntry(doc, remoteVersion);\n changedDocs = changedDocs.insert(key, doc);\n } else {\n logDebug(\n LOG_TAG,\n 'Ignoring outdated watch update for ',\n key,\n '. Current version:',\n existingDoc.version,\n ' Watch version:',\n doc.version\n );\n }\n\n if (remoteEvent.resolvedLimboDocuments.has(key)) {\n promises.push(\n this.persistence.referenceDelegate.updateLimboDocument(\n txn,\n key\n )\n );\n }\n });\n })\n );\n\n // HACK: The only reason we allow a null snapshot version is so that we\n // can synthesize remote events when we get permission denied errors while\n // trying to resolve the state of a locally cached document that is in\n // limbo.\n if (!remoteVersion.isEqual(SnapshotVersion.min())) {\n const updateRemoteVersion = this.targetCache\n .getLastRemoteSnapshotVersion(txn)\n .next(lastRemoteSnapshotVersion => {\n debugAssert(\n remoteVersion.compareTo(lastRemoteSnapshotVersion) >= 0,\n 'Watch stream reverted to previous snapshot?? 
' +\n remoteVersion +\n ' < ' +\n lastRemoteSnapshotVersion\n );\n return this.targetCache.setTargetsMetadata(\n txn,\n txn.currentSequenceNumber,\n remoteVersion\n );\n });\n promises.push(updateRemoteVersion);\n }\n\n return PersistencePromise.waitFor(promises)\n .next(() => documentBuffer.apply(txn))\n .next(() => {\n return this.localDocuments.getLocalViewOfDocuments(\n txn,\n changedDocs\n );\n });\n })\n .then(changedDocs => {\n this.targetDataByTarget = newTargetDataByTargetMap;\n return changedDocs;\n });\n }\n\n /**\n * Returns true if the newTargetData should be persisted during an update of\n * an active target. TargetData should always be persisted when a target is\n * being released and should not call this function.\n *\n * While the target is active, TargetData updates can be omitted when nothing\n * about the target has changed except metadata like the resume token or\n * snapshot version. Occasionally it's worth the extra write to prevent these\n * values from getting too stale after a crash, but this doesn't have to be\n * too frequent.\n */\n private static shouldPersistTargetData(\n oldTargetData: TargetData,\n newTargetData: TargetData,\n change: TargetChange\n ): boolean {\n hardAssert(\n newTargetData.resumeToken.approximateByteSize() > 0,\n 'Attempted to persist target data with no resume token'\n );\n\n // Always persist target data if we don't already have a resume token.\n if (oldTargetData.resumeToken.approximateByteSize() === 0) {\n return true;\n }\n\n // Don't allow resume token changes to be buffered indefinitely. This\n // allows us to be reasonably up-to-date after a crash and avoids needing\n // to loop over all active queries on shutdown. Especially in the browser\n // we may not get time to do anything interesting while the current tab is\n // closing.\n const timeDelta =\n newTargetData.snapshotVersion.toMicroseconds() -\n oldTargetData.snapshotVersion.toMicroseconds();\n if (timeDelta >= this.RESUME_TOKEN_MAX_AGE_MICROS) {\n return true;\n }\n\n // Otherwise if the only thing that has changed about a target is its resume\n // token it's not worth persisting. 
Note that the RemoteStore keeps an\n // in-memory view of the currently active targets which includes the current\n // resume token, so stream failure or user changes will still use an\n // up-to-date resume token regardless of what we do here.\n const changes =\n change.addedDocuments.size +\n change.modifiedDocuments.size +\n change.removedDocuments.size;\n return changes > 0;\n }\n\n /**\n * Notify local store of the changed views to locally pin documents.\n */\n async notifyLocalViewChanges(viewChanges: LocalViewChanges[]): Promise<void> {\n try {\n await this.persistence.runTransaction(\n 'notifyLocalViewChanges',\n 'readwrite',\n txn => {\n return PersistencePromise.forEach(\n viewChanges,\n (viewChange: LocalViewChanges) => {\n return PersistencePromise.forEach(\n viewChange.addedKeys,\n (key: DocumentKey) =>\n this.persistence.referenceDelegate.addReference(\n txn,\n viewChange.targetId,\n key\n )\n ).next(() =>\n PersistencePromise.forEach(\n viewChange.removedKeys,\n (key: DocumentKey) =>\n this.persistence.referenceDelegate.removeReference(\n txn,\n viewChange.targetId,\n key\n )\n )\n );\n }\n );\n }\n );\n } catch (e) {\n if (isIndexedDbTransactionError(e)) {\n // If `notifyLocalViewChanges` fails, we did not advance the sequence\n // number for the documents that were included in this transaction.\n // This might trigger them to be deleted earlier than they otherwise\n // would have, but it should not invalidate the integrity of the data.\n logDebug(LOG_TAG, 'Failed to update sequence numbers: ' + e);\n } else {\n throw e;\n }\n }\n\n for (const viewChange of viewChanges) {\n const targetId = viewChange.targetId;\n\n if (!viewChange.fromCache) {\n const targetData = this.targetDataByTarget.get(targetId);\n debugAssert(\n targetData !== null,\n `Can't set limbo-free snapshot version for unknown target: ${targetId}`\n );\n\n // Advance the last limbo free snapshot version\n const lastLimboFreeSnapshotVersion = targetData.snapshotVersion;\n const updatedTargetData = targetData.withLastLimboFreeSnapshotVersion(\n lastLimboFreeSnapshotVersion\n );\n this.targetDataByTarget = this.targetDataByTarget.insert(\n targetId,\n updatedTargetData\n );\n }\n }\n }\n\n /**\n * Gets the mutation batch after the passed in batchId in the mutation queue\n * or null if empty.\n * @param afterBatchId If provided, the batch to search after.\n * @returns The next mutation or null if there wasn't one.\n */\n nextMutationBatch(afterBatchId?: BatchId): Promise<MutationBatch | null> {\n return this.persistence.runTransaction(\n 'Get next mutation batch',\n 'readonly',\n txn => {\n if (afterBatchId === undefined) {\n afterBatchId = BATCHID_UNKNOWN;\n }\n return this.mutationQueue.getNextMutationBatchAfterBatchId(\n txn,\n afterBatchId\n );\n }\n );\n }\n\n /**\n * Read the current value of a Document with a given key or null if not\n * found - used for testing.\n */\n readDocument(key: DocumentKey): Promise<MaybeDocument | null> {\n return this.persistence.runTransaction('read document', 'readonly', txn => {\n return this.localDocuments.getDocument(txn, key);\n });\n }\n\n /**\n * Assigns the given target an internal ID so that its results can be pinned so\n * they don't get GC'd. 
A target must be allocated in the local store before\n * the store can be used to manage its view.\n *\n * Allocating an already allocated `Target` will return the existing `TargetData`\n * for that `Target`.\n */\n allocateTarget(target: Target): Promise<TargetData> {\n return this.persistence\n .runTransaction('Allocate target', 'readwrite', txn => {\n let targetData: TargetData;\n return this.targetCache\n .getTargetData(txn, target)\n .next((cached: TargetData | null) => {\n if (cached) {\n // This target has been listened to previously, so reuse the\n // previous targetID.\n // TODO(mcg): freshen last accessed date?\n targetData = cached;\n return PersistencePromise.resolve(targetData);\n } else {\n return this.targetCache.allocateTargetId(txn).next(targetId => {\n targetData = new TargetData(\n target,\n targetId,\n TargetPurpose.Listen,\n txn.currentSequenceNumber\n );\n return this.targetCache\n .addTargetData(txn, targetData)\n .next(() => targetData);\n });\n }\n });\n })\n .then(targetData => {\n if (this.targetDataByTarget.get(targetData.targetId) === null) {\n this.targetDataByTarget = this.targetDataByTarget.insert(\n targetData.targetId,\n targetData\n );\n this.targetIdByTarget.set(target, targetData.targetId);\n }\n return targetData;\n });\n }\n\n /**\n * Returns the TargetData as seen by the LocalStore, including updates that may\n * have not yet been persisted to the TargetCache.\n */\n // Visible for testing.\n getTargetData(\n transaction: PersistenceTransaction,\n target: Target\n ): PersistencePromise<TargetData | null> {\n const targetId = this.targetIdByTarget.get(target);\n if (targetId !== undefined) {\n return PersistencePromise.resolve<TargetData | null>(\n this.targetDataByTarget.get(targetId)\n );\n } else {\n return this.targetCache.getTargetData(transaction, target);\n }\n }\n\n /**\n * Unpin all the documents associated with the given target. If\n * `keepPersistedTargetData` is set to false and Eager GC enabled, the method\n * directly removes the associated target data from the target cache.\n *\n * Releasing a non-existing `Target` is a no-op.\n */\n // PORTING NOTE: `keepPersistedTargetData` is multi-tab only.\n releaseTarget(\n targetId: number,\n keepPersistedTargetData: boolean\n ): Promise<void> {\n const targetData = this.targetDataByTarget.get(targetId);\n debugAssert(\n targetData !== null,\n `Tried to release nonexistent target: ${targetId}`\n );\n\n const mode = keepPersistedTargetData ? 
'readwrite' : 'readwrite-primary';\n return this.persistence\n .runTransaction('Release target', mode, txn => {\n if (!keepPersistedTargetData) {\n return this.persistence.referenceDelegate.removeTarget(\n txn,\n targetData!\n );\n } else {\n return PersistencePromise.resolve();\n }\n })\n .then(() => {\n this.targetDataByTarget = this.targetDataByTarget.remove(targetId);\n this.targetIdByTarget.delete(targetData!.target);\n });\n }\n\n /**\n * Runs the specified query against the local store and returns the results,\n * potentially taking advantage of query data from previous executions (such\n * as the set of remote keys).\n *\n * @param usePreviousResults Whether results from previous executions can\n * be used to optimize this query execution.\n */\n executeQuery(\n query: Query,\n usePreviousResults: boolean\n ): Promise<QueryResult> {\n let lastLimboFreeSnapshotVersion = SnapshotVersion.min();\n let remoteKeys = documentKeySet();\n\n return this.persistence.runTransaction('Execute query', 'readonly', txn => {\n return this.getTargetData(txn, query.toTarget())\n .next(targetData => {\n if (targetData) {\n lastLimboFreeSnapshotVersion =\n targetData.lastLimboFreeSnapshotVersion;\n return this.targetCache\n .getMatchingKeysForTargetId(txn, targetData.targetId)\n .next(result => {\n remoteKeys = result;\n });\n }\n })\n .next(() =>\n this.queryEngine.getDocumentsMatchingQuery(\n txn,\n query,\n usePreviousResults\n ? lastLimboFreeSnapshotVersion\n : SnapshotVersion.min(),\n usePreviousResults ? remoteKeys : documentKeySet()\n )\n )\n .next(documents => {\n return { documents, remoteKeys };\n });\n });\n }\n\n private applyWriteToRemoteDocuments(\n txn: PersistenceTransaction,\n batchResult: MutationBatchResult,\n documentBuffer: RemoteDocumentChangeBuffer\n ): PersistencePromise<void> {\n const batch = batchResult.batch;\n const docKeys = batch.keys();\n let promiseChain = PersistencePromise.resolve();\n docKeys.forEach(docKey => {\n promiseChain = promiseChain\n .next(() => {\n return documentBuffer.getEntry(txn, docKey);\n })\n .next((remoteDoc: MaybeDocument | null) => {\n let doc = remoteDoc;\n const ackVersion = batchResult.docVersions.get(docKey);\n hardAssert(\n ackVersion !== null,\n 'ackVersions should contain every doc in the write.'\n );\n if (!doc || doc.version.compareTo(ackVersion!) 
< 0) {\n doc = batch.applyToRemoteDocument(docKey, doc, batchResult);\n if (!doc) {\n debugAssert(\n !remoteDoc,\n 'Mutation batch ' +\n batch +\n ' applied to document ' +\n remoteDoc +\n ' resulted in null'\n );\n } else {\n // We use the commitVersion as the readTime rather than the\n // document's updateTime since the updateTime is not advanced\n // for updates that do not modify the underlying document.\n documentBuffer.addEntry(doc, batchResult.commitVersion);\n }\n }\n });\n });\n return promiseChain.next(() =>\n this.mutationQueue.removeMutationBatch(txn, batch)\n );\n }\n\n collectGarbage(garbageCollector: LruGarbageCollector): Promise<LruResults> {\n return this.persistence.runTransaction(\n 'Collect garbage',\n 'readwrite-primary',\n txn => garbageCollector.collect(txn, this.targetDataByTarget)\n );\n }\n}\n\n/**\n * An implementation of LocalStore that provides additional functionality\n * for MultiTabSyncEngine.\n */\n// PORTING NOTE: Web only.\nexport class MultiTabLocalStore extends LocalStore {\n protected mutationQueue: IndexedDbMutationQueue;\n protected remoteDocuments: IndexedDbRemoteDocumentCache;\n protected targetCache: IndexedDbTargetCache;\n\n constructor(\n protected persistence: IndexedDbPersistence,\n queryEngine: QueryEngine,\n initialUser: User\n ) {\n super(persistence, queryEngine, initialUser);\n\n this.mutationQueue = persistence.getMutationQueue(initialUser);\n this.remoteDocuments = persistence.getRemoteDocumentCache();\n this.targetCache = persistence.getTargetCache();\n }\n\n /** Starts the LocalStore. */\n start(): Promise<void> {\n return this.synchronizeLastDocumentChangeReadTime();\n }\n\n /** Returns the local view of the documents affected by a mutation batch. */\n lookupMutationDocuments(batchId: BatchId): Promise<MaybeDocumentMap | null> {\n return this.persistence.runTransaction(\n 'Lookup mutation documents',\n 'readonly',\n txn => {\n return this.mutationQueue\n .lookupMutationKeys(txn, batchId)\n .next(keys => {\n if (keys) {\n return this.localDocuments.getDocuments(\n txn,\n keys\n ) as PersistencePromise<MaybeDocumentMap | null>;\n } else {\n return PersistencePromise.resolve<MaybeDocumentMap | null>(null);\n }\n });\n }\n );\n }\n\n removeCachedMutationBatchMetadata(batchId: BatchId): void {\n this.mutationQueue.removeCachedMutationKeys(batchId);\n }\n\n setNetworkEnabled(networkEnabled: boolean): void {\n this.persistence.setNetworkEnabled(networkEnabled);\n }\n\n getActiveClients(): Promise<ClientId[]> {\n return this.persistence.getActiveClients();\n }\n\n getTarget(targetId: TargetId): Promise<Target | null> {\n const cachedTargetData = this.targetDataByTarget.get(targetId);\n\n if (cachedTargetData) {\n return Promise.resolve(cachedTargetData.target);\n } else {\n return this.persistence.runTransaction(\n 'Get target data',\n 'readonly',\n txn => {\n return this.targetCache\n .getTargetDataForTarget(txn, targetId)\n .next(targetData => (targetData ? targetData.target : null));\n }\n );\n }\n }\n\n /**\n * Returns the set of documents that have been updated since the last call.\n * If this is the first call, returns the set of changes since client\n * initialization. 
Further invocations will return document changes since\n * the point of rejection.\n */\n getNewDocumentChanges(): Promise<MaybeDocumentMap> {\n return this.persistence\n .runTransaction('Get new document changes', 'readonly', txn =>\n this.remoteDocuments.getNewDocumentChanges(\n txn,\n this.lastDocumentChangeReadTime\n )\n )\n .then(({ changedDocs, readTime }) => {\n this.lastDocumentChangeReadTime = readTime;\n return changedDocs;\n });\n }\n\n /**\n * Reads the newest document change from persistence and forwards the internal\n * synchronization marker so that calls to `getNewDocumentChanges()`\n * only return changes that happened after client initialization.\n */\n async synchronizeLastDocumentChangeReadTime(): Promise<void> {\n this.lastDocumentChangeReadTime = await this.persistence.runTransaction(\n 'Synchronize last document change read time',\n 'readonly',\n txn => this.remoteDocuments.getLastReadTime(txn)\n );\n }\n}\n\n/**\n * Verifies the error thrown by a LocalStore operation. If a LocalStore\n * operation fails because the primary lease has been taken by another client,\n * we ignore the error (the persistence layer will immediately call\n * `applyPrimaryLease` to propagate the primary state change). All other errors\n * are re-thrown.\n *\n * @param err An error returned by a LocalStore operation.\n * @return A Promise that resolves after we recovered, or the original error.\n */\nexport async function ignoreIfPrimaryLeaseLoss(\n err: FirestoreError\n): Promise<void> {\n if (\n err.code === Code.FAILED_PRECONDITION &&\n err.message === PRIMARY_LEASE_LOST_ERROR_MSG\n ) {\n logDebug(LOG_TAG, 'Unexpectedly lost primary lease');\n } else {\n throw err;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { User } from '../auth/user';\nimport { ListenSequenceNumber, TargetId } from '../core/types';\nimport { DocumentKey } from '../model/document_key';\nimport { IndexManager } from './index_manager';\nimport { LocalStore } from './local_store';\nimport { MutationQueue } from './mutation_queue';\nimport { PersistencePromise } from './persistence_promise';\nimport { TargetCache } from './target_cache';\nimport { RemoteDocumentCache } from './remote_document_cache';\nimport { TargetData } from './target_data';\n\nexport const PRIMARY_LEASE_LOST_ERROR_MSG =\n 'The current tab is not in the required state to perform this operation. ' +\n 'It might be necessary to refresh the browser tab.';\n\n/**\n * A base class representing a persistence transaction, encapsulating both the\n * transaction's sequence numbers as well as a list of onCommitted listeners.\n *\n * When you call Persistence.runTransaction(), it will create a transaction and\n * pass it to your callback. 
You then pass it to any method that operates\n * on persistence.\n */\nexport abstract class PersistenceTransaction {\n private readonly onCommittedListeners: Array<() => void> = [];\n\n abstract readonly currentSequenceNumber: ListenSequenceNumber;\n\n addOnCommittedListener(listener: () => void): void {\n this.onCommittedListeners.push(listener);\n }\n\n raiseOnCommittedEvent(): void {\n this.onCommittedListeners.forEach(listener => listener());\n }\n}\n\n/** The different modes supported by `IndexedDbPersistence.runTransaction()`. */\nexport type PersistenceTransactionMode =\n | 'readonly'\n | 'readwrite'\n | 'readwrite-primary';\n\n/**\n * Callback type for primary state notifications. This callback can be\n * registered with the persistence layer to get notified when we transition from\n * primary to secondary state and vice versa.\n *\n * Note: Instances can only toggle between Primary and Secondary state if\n * IndexedDB persistence is enabled and multiple clients are active. If this\n * listener is registered with MemoryPersistence, the callback will be called\n * exactly once marking the current instance as Primary.\n */\nexport type PrimaryStateListener = (isPrimary: boolean) => Promise<void>;\n\n/**\n * A ReferenceDelegate instance handles all of the hooks into the document-reference lifecycle. This\n * includes being added to a target, being removed from a target, being subject to mutation, and\n * being mutated by the user.\n *\n * Different implementations may do different things with each of these events. Not every\n * implementation needs to do something with every lifecycle hook.\n *\n * PORTING NOTE: since sequence numbers are attached to transactions in this\n * client, the ReferenceDelegate does not need to deal in transactional\n * semantics (onTransactionStarted/Committed()), nor does it need to track and\n * generate sequence numbers (getCurrentSequenceNumber()).\n */\nexport interface ReferenceDelegate {\n /** Notify the delegate that the given document was added to a target. */\n addReference(\n txn: PersistenceTransaction,\n targetId: TargetId,\n doc: DocumentKey\n ): PersistencePromise<void>;\n\n /** Notify the delegate that the given document was removed from a target. */\n removeReference(\n txn: PersistenceTransaction,\n targetId: TargetId,\n doc: DocumentKey\n ): PersistencePromise<void>;\n\n /**\n * Notify the delegate that a target was removed. The delegate may, but is not obligated to,\n * actually delete the target and associated data.\n */\n removeTarget(\n txn: PersistenceTransaction,\n targetData: TargetData\n ): PersistencePromise<void>;\n\n /**\n * Notify the delegate that a document may no longer be part of any views or\n * have any mutations associated.\n */\n markPotentiallyOrphaned(\n txn: PersistenceTransaction,\n doc: DocumentKey\n ): PersistencePromise<void>;\n\n /** Notify the delegate that a limbo document was updated. */\n updateLimboDocument(\n txn: PersistenceTransaction,\n doc: DocumentKey\n ): PersistencePromise<void>;\n}\n\n/**\n * Persistence is the lowest-level shared interface to persistent storage in\n * Firestore.\n *\n * Persistence is used to create MutationQueue and RemoteDocumentCache\n * instances backed by persistence (which might be in-memory or LevelDB).\n *\n * Persistence also exposes an API to create and run PersistenceTransactions\n * against persistence. All read / write operations must be wrapped in a\n * transaction. 
Implementations of PersistenceTransaction / Persistence only\n * need to guarantee that writes made against the transaction are not made to\n * durable storage until the transaction resolves its PersistencePromise.\n * Since memory-only storage components do not alter durable storage, they are\n * free to ignore the transaction.\n *\n * This contract is enough to allow the LocalStore to be written\n * independently of whether or not the stored state actually is durably\n * persisted. If persistent storage is enabled, writes are grouped together to\n * avoid inconsistent state that could cause crashes.\n *\n * Concretely, when persistent storage is enabled, the persistent versions of\n * MutationQueue, RemoteDocumentCache, and others (the mutators) will\n * defer their writes into a transaction. Once the local store has completed\n * one logical operation, it commits the transaction.\n *\n * When persistent storage is disabled, the non-persistent versions of the\n * mutators ignore the transaction. This short-cut is allowed because\n * memory-only storage leaves no state so it cannot be inconsistent.\n *\n * This simplifies the implementations of the mutators and allows memory-only\n * implementations to supplement the persistent ones without requiring any\n * special dual-store implementation of Persistence. The cost is that the\n * LocalStore needs to be slightly careful about the order of its reads and\n * writes in order to avoid relying on being able to read back uncommitted\n * writes.\n */\nexport interface Persistence {\n /**\n * Whether or not this persistence instance has been started.\n */\n readonly started: boolean;\n\n readonly referenceDelegate: ReferenceDelegate;\n\n /** Starts persistence. */\n start(): Promise<void>;\n\n /**\n * Releases any resources held during eager shutdown.\n */\n shutdown(): Promise<void>;\n\n /**\n * Registers a listener that gets called when the database receives a\n * version change event indicating that it has been deleted.\n *\n * PORTING NOTE: This is only used for Web multi-tab.\n */\n setDatabaseDeletedListener(\n databaseDeletedListener: () => Promise<void>\n ): void;\n\n /**\n * Returns a MutationQueue representing the persisted mutations for the\n * given user.\n *\n * Note: The implementation is free to return the same instance every time\n * this is called for a given user. In particular, the memory-backed\n * implementation does this to emulate the persisted implementation to the\n * extent possible (e.g. in the case of uid switching from\n * sally=>jack=>sally, sally's mutation queue will be preserved).\n */\n getMutationQueue(user: User): MutationQueue;\n\n /**\n * Returns a TargetCache representing the persisted cache of targets.\n *\n * Note: The implementation is free to return the same instance every time\n * this is called. In particular, the memory-backed implementation does this\n * to emulate the persisted implementation to the extent possible.\n */\n getTargetCache(): TargetCache;\n\n /**\n * Returns a RemoteDocumentCache representing the persisted cache of remote\n * documents.\n *\n * Note: The implementation is free to return the same instance every time\n * this is called. 
In particular, the memory-backed implementation does this\n * to emulate the persisted implementation to the extent possible.\n */\n getRemoteDocumentCache(): RemoteDocumentCache;\n\n /**\n * Returns an IndexManager instance that manages our persisted query indexes.\n *\n * Note: The implementation is free to return the same instance every time\n * this is called. In particular, the memory-backed implementation does this\n * to emulate the persisted implementation to the extent possible.\n */\n getIndexManager(): IndexManager;\n\n /**\n * Performs an operation inside a persistence transaction. Any reads or writes\n * against persistence must be performed within a transaction. Writes will be\n * committed atomically once the transaction completes.\n *\n * Persistence operations are asynchronous and therefore the provided\n * transactionOperation must return a PersistencePromise. When it is resolved,\n * the transaction will be committed and the Promise returned by this method\n * will resolve.\n *\n * @param action A description of the action performed by this transaction,\n * used for logging.\n * @param mode The underlying mode of the IndexedDb transaction. Can be\n * 'readonly`, 'readwrite' or 'readwrite-primary'. Transactions marked\n * 'readwrite-primary' can only be executed by the primary client. In this\n * mode, the transactionOperation will not be run if the primary lease cannot\n * be acquired and the returned promise will be rejected with a\n * FAILED_PRECONDITION error.\n * @param transactionOperation The operation to run inside a transaction.\n * @return A promise that is resolved once the transaction completes.\n */\n runTransaction<T>(\n action: string,\n mode: PersistenceTransactionMode,\n transactionOperation: (\n transaction: PersistenceTransaction\n ) => PersistencePromise<T>\n ): Promise<T>;\n}\n\n/**\n * Interface implemented by the LRU scheduler to start(), stop() and restart\n * garbage collection.\n */\nexport interface GarbageCollectionScheduler {\n readonly started: boolean;\n start(localStore: LocalStore): void;\n stop(): void;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { BatchId, TargetId } from '../core/types';\nimport { documentKeySet, DocumentKeySet } from '../model/collections';\nimport { DocumentKey } from '../model/document_key';\nimport { primitiveComparator } from '../util/misc';\nimport { SortedSet } from '../util/sorted_set';\n\n/**\n * A collection of references to a document from some kind of numbered entity\n * (either a target ID or batch ID). As references are added to or removed from\n * the set corresponding events are emitted to a registered garbage collector.\n *\n * Each reference is represented by a DocumentReference object. Each of them\n * contains enough information to uniquely identify the reference. They are all\n * stored primarily in a set sorted by key. 
A document is considered garbage if\n * there's no references in that set (this can be efficiently checked thanks to\n * sorting by key).\n *\n * ReferenceSet also keeps a secondary set that contains references sorted by\n * IDs. This one is used to efficiently implement removal of all references by\n * some target ID.\n */\nexport class ReferenceSet {\n // A set of outstanding references to a document sorted by key.\n private refsByKey = new SortedSet(DocReference.compareByKey);\n\n // A set of outstanding references to a document sorted by target id.\n private refsByTarget = new SortedSet(DocReference.compareByTargetId);\n\n /** Returns true if the reference set contains no references. */\n isEmpty(): boolean {\n return this.refsByKey.isEmpty();\n }\n\n /** Adds a reference to the given document key for the given ID. */\n addReference(key: DocumentKey, id: TargetId | BatchId): void {\n const ref = new DocReference(key, id);\n this.refsByKey = this.refsByKey.add(ref);\n this.refsByTarget = this.refsByTarget.add(ref);\n }\n\n /** Add references to the given document keys for the given ID. */\n addReferences(keys: DocumentKeySet, id: TargetId | BatchId): void {\n keys.forEach(key => this.addReference(key, id));\n }\n\n /**\n * Removes a reference to the given document key for the given\n * ID.\n */\n removeReference(key: DocumentKey, id: TargetId | BatchId): void {\n this.removeRef(new DocReference(key, id));\n }\n\n removeReferences(keys: DocumentKeySet, id: TargetId | BatchId): void {\n keys.forEach(key => this.removeReference(key, id));\n }\n\n /**\n * Clears all references with a given ID. Calls removeRef() for each key\n * removed.\n */\n removeReferencesForId(id: TargetId | BatchId): DocumentKey[] {\n const emptyKey = DocumentKey.EMPTY;\n const startRef = new DocReference(emptyKey, id);\n const endRef = new DocReference(emptyKey, id + 1);\n const keys: DocumentKey[] = [];\n this.refsByTarget.forEachInRange([startRef, endRef], ref => {\n this.removeRef(ref);\n keys.push(ref.key);\n });\n return keys;\n }\n\n removeAllReferences(): void {\n this.refsByKey.forEach(ref => this.removeRef(ref));\n }\n\n private removeRef(ref: DocReference): void {\n this.refsByKey = this.refsByKey.delete(ref);\n this.refsByTarget = this.refsByTarget.delete(ref);\n }\n\n referencesForId(id: TargetId | BatchId): DocumentKeySet {\n const emptyKey = DocumentKey.EMPTY;\n const startRef = new DocReference(emptyKey, id);\n const endRef = new DocReference(emptyKey, id + 1);\n let keys = documentKeySet();\n this.refsByTarget.forEachInRange([startRef, endRef], ref => {\n keys = keys.add(ref.key);\n });\n return keys;\n }\n\n containsKey(key: DocumentKey): boolean {\n const ref = new DocReference(key, 0);\n const firstRef = this.refsByKey.firstAfterOrEqual(ref);\n return firstRef !== null && key.isEqual(firstRef.key);\n }\n}\n\nexport class DocReference {\n constructor(\n public key: DocumentKey,\n public targetOrBatchId: TargetId | BatchId\n ) {}\n\n /** Compare by key then by ID */\n static compareByKey(left: DocReference, right: DocReference): number {\n return (\n DocumentKey.comparator(left.key, right.key) ||\n primitiveComparator(left.targetOrBatchId, right.targetOrBatchId)\n );\n }\n\n /** Compare by ID then by key */\n static compareByTargetId(left: DocReference, right: DocReference): number {\n return (\n primitiveComparator(left.targetOrBatchId, right.targetOrBatchId) ||\n DocumentKey.comparator(left.key, right.key)\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the 
Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { fail } from './assert';\nimport { Code, FirestoreError } from './error';\nimport { Dict, forEach } from './obj';\n\n/** Types accepted by validateType() and related methods for validation. */\nexport type ValidationType =\n | 'undefined'\n | 'object'\n | 'function'\n | 'boolean'\n | 'number'\n | 'string'\n | 'non-empty string';\n\n/**\n * Validates that no arguments were passed in the invocation of functionName.\n *\n * Forward the magic \"arguments\" variable as second parameter on which the\n * parameter validation is performed:\n * validateNoArgs('myFunction', arguments);\n */\nexport function validateNoArgs(functionName: string, args: IArguments): void {\n if (args.length !== 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() does not support arguments, ` +\n 'but was called with ' +\n formatPlural(args.length, 'argument') +\n '.'\n );\n }\n}\n\n/**\n * Validates the invocation of functionName has the exact number of arguments.\n *\n * Forward the magic \"arguments\" variable as second parameter on which the\n * parameter validation is performed:\n * validateExactNumberOfArgs('myFunction', arguments, 2);\n */\nexport function validateExactNumberOfArgs(\n functionName: string,\n args: IArguments,\n numberOfArgs: number\n): void {\n if (args.length !== numberOfArgs) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires ` +\n formatPlural(numberOfArgs, 'argument') +\n ', but was called with ' +\n formatPlural(args.length, 'argument') +\n '.'\n );\n }\n}\n\n/**\n * Validates the invocation of functionName has at least the provided number of\n * arguments (but can have many more).\n *\n * Forward the magic \"arguments\" variable as second parameter on which the\n * parameter validation is performed:\n * validateAtLeastNumberOfArgs('myFunction', arguments, 2);\n */\nexport function validateAtLeastNumberOfArgs(\n functionName: string,\n args: IArguments,\n minNumberOfArgs: number\n): void {\n if (args.length < minNumberOfArgs) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires at least ` +\n formatPlural(minNumberOfArgs, 'argument') +\n ', but was called with ' +\n formatPlural(args.length, 'argument') +\n '.'\n );\n }\n}\n\n/**\n * Validates the invocation of functionName has number of arguments between\n * the values provided.\n *\n * Forward the magic \"arguments\" variable as second parameter on which the\n * parameter validation is performed:\n * validateBetweenNumberOfArgs('myFunction', arguments, 2, 3);\n */\nexport function validateBetweenNumberOfArgs(\n functionName: string,\n args: IArguments,\n minNumberOfArgs: number,\n maxNumberOfArgs: number\n): void {\n if (args.length < minNumberOfArgs || args.length > maxNumberOfArgs) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires between ${minNumberOfArgs} and ` +\n `${maxNumberOfArgs} arguments, but was called with ` +\n 
formatPlural(args.length, 'argument') +\n '.'\n );\n }\n}\n\n/**\n * Validates the provided argument is an array and has as least the expected\n * number of elements.\n */\nexport function validateNamedArrayAtLeastNumberOfElements<T>(\n functionName: string,\n value: T[],\n name: string,\n minNumberOfElements: number\n): void {\n if (!(value instanceof Array) || value.length < minNumberOfElements) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires its ${name} argument to be an ` +\n 'array with at least ' +\n `${formatPlural(minNumberOfElements, 'element')}.`\n );\n }\n}\n\n/**\n * Validates the provided positional argument has the native JavaScript type\n * using typeof checks.\n */\nexport function validateArgType(\n functionName: string,\n type: ValidationType,\n position: number,\n argument: unknown\n): void {\n validateType(functionName, type, `${ordinal(position)} argument`, argument);\n}\n\n/**\n * Validates the provided argument has the native JavaScript type using\n * typeof checks or is undefined.\n */\nexport function validateOptionalArgType(\n functionName: string,\n type: ValidationType,\n position: number,\n argument: unknown\n): void {\n if (argument !== undefined) {\n validateArgType(functionName, type, position, argument);\n }\n}\n\n/**\n * Validates the provided named option has the native JavaScript type using\n * typeof checks.\n */\nexport function validateNamedType(\n functionName: string,\n type: ValidationType,\n optionName: string,\n argument: unknown\n): void {\n validateType(functionName, type, `${optionName} option`, argument);\n}\n\n/**\n * Validates the provided named option has the native JavaScript type using\n * typeof checks or is undefined.\n */\nexport function validateNamedOptionalType(\n functionName: string,\n type: ValidationType,\n optionName: string,\n argument: unknown\n): void {\n if (argument !== undefined) {\n validateNamedType(functionName, type, optionName, argument);\n }\n}\n\nexport function validateArrayElements<T>(\n functionName: string,\n optionName: string,\n typeDescription: string,\n argument: T[],\n validator: (arg0: T) => boolean\n): void {\n if (!(argument instanceof Array)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires its ${optionName} ` +\n `option to be an array, but it was: ${valueDescription(argument)}`\n );\n }\n\n for (let i = 0; i < argument.length; ++i) {\n if (!validator(argument[i])) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires all ${optionName} ` +\n `elements to be ${typeDescription}, but the value at index ${i} ` +\n `was: ${valueDescription(argument[i])}`\n );\n }\n }\n}\n\nexport function validateOptionalArrayElements<T>(\n functionName: string,\n optionName: string,\n typeDescription: string,\n argument: T[] | undefined,\n validator: (arg0: T) => boolean\n): void {\n if (argument !== undefined) {\n validateArrayElements(\n functionName,\n optionName,\n typeDescription,\n argument,\n validator\n );\n }\n}\n\n/**\n * Validates that the provided named option equals one of the expected values.\n */\nexport function validateNamedPropertyEquals<T>(\n functionName: string,\n inputName: string,\n optionName: string,\n input: T,\n expected: T[]\n): void {\n const expectedDescription: string[] = [];\n\n for (const val of expected) {\n if (val === input) {\n return;\n }\n expectedDescription.push(valueDescription(val));\n }\n\n const actualDescription = valueDescription(input);\n throw 
new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid value ${actualDescription} provided to function ${functionName}() for option ` +\n `\"${optionName}\". Acceptable values: ${expectedDescription.join(', ')}`\n );\n}\n\n/**\n * Validates that the provided named option equals one of the expected values or\n * is undefined.\n */\nexport function validateNamedOptionalPropertyEquals<T>(\n functionName: string,\n inputName: string,\n optionName: string,\n input: T,\n expected: T[]\n): void {\n if (input !== undefined) {\n validateNamedPropertyEquals(\n functionName,\n inputName,\n optionName,\n input,\n expected\n );\n }\n}\n\n/**\n * Validates that the provided argument is a valid enum.\n *\n * @param functionName Function making the validation call.\n * @param enums Array containing all possible values for the enum.\n * @param position Position of the argument in `functionName`.\n * @param argument Argument to validate.\n * @return The value as T if the argument can be converted.\n */\nexport function validateStringEnum<T>(\n functionName: string,\n enums: T[],\n position: number,\n argument: unknown\n): T {\n if (!enums.some(element => element === argument)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid value ${valueDescription(argument)} provided to function ` +\n `${functionName}() for its ${ordinal(position)} argument. Acceptable ` +\n `values: ${enums.join(', ')}`\n );\n }\n return argument as T;\n}\n\n/** Helper to validate the type of a provided input. */\nfunction validateType(\n functionName: string,\n type: ValidationType,\n inputName: string,\n input: unknown\n): void {\n let valid = false;\n if (type === 'object') {\n valid = isPlainObject(input);\n } else if (type === 'non-empty string') {\n valid = typeof input === 'string' && input !== '';\n } else {\n valid = typeof input === type;\n }\n\n if (!valid) {\n const description = valueDescription(input);\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires its ${inputName} ` +\n `to be of type ${type}, but it was: ${description}`\n );\n }\n}\n\n/**\n * Returns true if it's a non-null object without a custom prototype\n * (i.e. excludes Array, Date, etc.).\n */\nexport function isPlainObject(input: unknown): boolean {\n return (\n typeof input === 'object' &&\n input !== null &&\n (Object.getPrototypeOf(input) === Object.prototype ||\n Object.getPrototypeOf(input) === null)\n );\n}\n\n/** Returns a string describing the type / value of the provided input. */\nexport function valueDescription(input: unknown): string {\n if (input === undefined) {\n return 'undefined';\n } else if (input === null) {\n return 'null';\n } else if (typeof input === 'string') {\n if (input.length > 20) {\n input = `${input.substring(0, 20)}...`;\n }\n return JSON.stringify(input);\n } else if (typeof input === 'number' || typeof input === 'boolean') {\n return '' + input;\n } else if (typeof input === 'object') {\n if (input instanceof Array) {\n return 'an array';\n } else {\n const customObjectName = tryGetCustomObjectType(input!);\n if (customObjectName) {\n return `a custom ${customObjectName} object`;\n } else {\n return 'an object';\n }\n }\n } else if (typeof input === 'function') {\n return 'a function';\n } else {\n return fail('Unknown wrong type: ' + typeof input);\n }\n}\n\n/** Hacky method to try to get the constructor name for an object. 
*/\nexport function tryGetCustomObjectType(input: object): string | null {\n if (input.constructor) {\n const funcNameRegex = /function\\s+([^\\s(]+)\\s*\\(/;\n const results = funcNameRegex.exec(input.constructor.toString());\n if (results && results.length > 1) {\n return results[1];\n }\n }\n return null;\n}\n\n/** Validates the provided argument is defined. */\nexport function validateDefined(\n functionName: string,\n position: number,\n argument: unknown\n): void {\n if (argument === undefined) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires a valid ${ordinal(position)} ` +\n `argument, but it was undefined.`\n );\n }\n}\n\n/**\n * Validates the provided positional argument is an object, and its keys and\n * values match the expected keys and types provided in optionTypes.\n */\nexport function validateOptionNames(\n functionName: string,\n options: object,\n optionNames: string[]\n): void {\n forEach(options as Dict<unknown>, (key, _) => {\n if (optionNames.indexOf(key) < 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Unknown option '${key}' passed to function ${functionName}(). ` +\n 'Available options: ' +\n optionNames.join(', ')\n );\n }\n });\n}\n\n/**\n * Helper method to throw an error that the provided argument did not pass\n * an instanceof check.\n */\nexport function invalidClassError(\n functionName: string,\n type: string,\n position: number,\n argument: unknown\n): Error {\n const description = valueDescription(argument);\n return new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires its ${ordinal(position)} ` +\n `argument to be a ${type}, but it was: ${description}`\n );\n}\n\nexport function validatePositiveNumber(\n functionName: string,\n position: number,\n n: number\n): void {\n if (n <= 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${functionName}() requires its ${ordinal(\n position\n )} argument to be a positive number, but it was: ${n}.`\n );\n }\n}\n\n/** Converts a number to its english word representation */\nfunction ordinal(num: number): string {\n switch (num) {\n case 1:\n return 'first';\n case 2:\n return 'second';\n case 3:\n return 'third';\n default:\n return num + 'th';\n }\n}\n\n/**\n * Formats the given word as plural conditionally given the preceding number.\n */\nfunction formatPlural(num: number, str: string): string {\n return `${num} ${str}` + (num === 1 ? '' : 's');\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PlatformSupport } from '../platform/platform';\nimport { Code, FirestoreError } from '../util/error';\nimport {\n invalidClassError,\n validateArgType,\n validateExactNumberOfArgs\n} from '../util/input_validation';\nimport { ByteString } from '../util/byte_string';\n\n/** Helper function to assert Uint8Array is available at runtime. 
*/\nfunction assertUint8ArrayAvailable(): void {\n if (typeof Uint8Array === 'undefined') {\n throw new FirestoreError(\n Code.UNIMPLEMENTED,\n 'Uint8Arrays are not available in this environment.'\n );\n }\n}\n\n/** Helper function to assert Base64 functions are available at runtime. */\nfunction assertBase64Available(): void {\n if (!PlatformSupport.getPlatform().base64Available) {\n throw new FirestoreError(\n Code.UNIMPLEMENTED,\n 'Blobs are unavailable in Firestore in this environment.'\n );\n }\n}\n\n/**\n * Immutable class holding a blob (binary data).\n * This class is directly exposed in the public API.\n *\n * Note that while you can't hide the constructor in JavaScript code, we are\n * using the hack above to make sure no-one outside this module can call it.\n */\nexport class Blob {\n // Prefix with underscore to signal that we consider this not part of the\n // public API and to prevent it from showing up for autocompletion.\n _byteString: ByteString;\n\n constructor(byteString: ByteString) {\n assertBase64Available();\n this._byteString = byteString;\n }\n\n static fromBase64String(base64: string): Blob {\n validateExactNumberOfArgs('Blob.fromBase64String', arguments, 1);\n validateArgType('Blob.fromBase64String', 'string', 1, base64);\n assertBase64Available();\n try {\n return new Blob(ByteString.fromBase64String(base64));\n } catch (e) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Failed to construct Blob from Base64 string: ' + e\n );\n }\n }\n\n static fromUint8Array(array: Uint8Array): Blob {\n validateExactNumberOfArgs('Blob.fromUint8Array', arguments, 1);\n assertUint8ArrayAvailable();\n if (!(array instanceof Uint8Array)) {\n throw invalidClassError('Blob.fromUint8Array', 'Uint8Array', 1, array);\n }\n return new Blob(ByteString.fromUint8Array(array));\n }\n\n toBase64(): string {\n validateExactNumberOfArgs('Blob.toBase64', arguments, 0);\n assertBase64Available();\n return this._byteString.toBase64();\n }\n\n toUint8Array(): Uint8Array {\n validateExactNumberOfArgs('Blob.toUint8Array', arguments, 0);\n assertUint8ArrayAvailable();\n return this._byteString.toUint8Array();\n }\n\n toString(): string {\n return 'Blob(base64: ' + this.toBase64() + ')';\n }\n\n isEqual(other: Blob): boolean {\n return this._byteString.isEqual(other._byteString);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as firestore from '@firebase/firestore-types';\n\nimport { FieldPath as InternalFieldPath } from '../model/path';\nimport { Code, FirestoreError } from '../util/error';\nimport {\n invalidClassError,\n validateArgType,\n validateNamedArrayAtLeastNumberOfElements\n} from '../util/input_validation';\n\n// The objects that are a part of this API are exposed to third-parties as\n// compiled javascript so we want to flag our private members with a leading\n// underscore to discourage their use.\n\n/**\n * A FieldPath refers to a field in a document. 
The path may consist of a single\n * field name (referring to a top-level field in the document), or a list of\n * field names (referring to a nested field in the document).\n */\nexport class FieldPath implements firestore.FieldPath {\n /** Internal representation of a Firestore field path. */\n _internalPath: InternalFieldPath;\n\n /**\n * Creates a FieldPath from the provided field names. If more than one field\n * name is provided, the path will point to a nested field in a document.\n *\n * @param fieldNames A list of field names.\n */\n constructor(...fieldNames: string[]) {\n validateNamedArrayAtLeastNumberOfElements(\n 'FieldPath',\n fieldNames,\n 'fieldNames',\n 1\n );\n\n for (let i = 0; i < fieldNames.length; ++i) {\n validateArgType('FieldPath', 'string', i, fieldNames[i]);\n if (fieldNames[i].length === 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid field name at argument $(i + 1). ` +\n 'Field names must not be empty.'\n );\n }\n }\n\n this._internalPath = new InternalFieldPath(fieldNames);\n }\n\n /**\n * Internal Note: The backend doesn't technically support querying by\n * document ID. Instead it queries by the entire document name (full path\n * included), but in the cases we currently support documentId(), the net\n * effect is the same.\n */\n private static readonly _DOCUMENT_ID = new FieldPath(\n InternalFieldPath.keyField().canonicalString()\n );\n\n static documentId(): FieldPath {\n return FieldPath._DOCUMENT_ID;\n }\n\n isEqual(other: firestore.FieldPath): boolean {\n if (!(other instanceof FieldPath)) {\n throw invalidClassError('isEqual', 'FieldPath', 1, other);\n }\n return this._internalPath.isEqual(other._internalPath);\n }\n}\n\n/**\n * Matches any characters in a field path string that are reserved.\n */\nconst RESERVED = new RegExp('[~\\\\*/\\\\[\\\\]]');\n\n/**\n * Parses a field path string into a FieldPath, treating dots as separators.\n */\nexport function fromDotSeparatedString(path: string): FieldPath {\n const found = path.search(RESERVED);\n if (found >= 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid field path (${path}). Paths must not contain ` +\n `'~', '*', '/', '[', or ']'`\n );\n }\n try {\n return new FieldPath(...path.split('.'));\n } catch (e) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid field path (${path}). 
Paths must not be empty, ` +\n `begin with '.', end with '.', or contain '..'`\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as firestore from '@firebase/firestore-types';\nimport {\n validateArgType,\n validateAtLeastNumberOfArgs,\n validateExactNumberOfArgs,\n validateNoArgs\n} from '../util/input_validation';\nimport { FieldTransform } from '../model/mutation';\nimport {\n ArrayRemoveTransformOperation,\n ArrayUnionTransformOperation,\n NumericIncrementTransformOperation,\n ServerTimestampTransform\n} from '../model/transform_operation';\nimport { ParseContext, parseData, UserDataSource } from './user_data_reader';\nimport { debugAssert } from '../util/assert';\n\n/**\n * An opaque base class for FieldValue sentinel objects in our public API,\n * with public static methods for creating said sentinel objects.\n */\nexport abstract class FieldValueImpl {\n protected constructor(readonly _methodName: string) {}\n\n abstract toFieldTransform(context: ParseContext): FieldTransform | null;\n\n abstract isEqual(other: FieldValue): boolean;\n}\n\nexport class DeleteFieldValueImpl extends FieldValueImpl {\n constructor() {\n super('FieldValue.delete');\n }\n\n toFieldTransform(context: ParseContext): null {\n if (context.dataSource === UserDataSource.MergeSet) {\n // No transform to add for a delete, but we need to add it to our\n // fieldMask so it gets deleted.\n context.fieldMask.push(context.path!);\n } else if (context.dataSource === UserDataSource.Update) {\n debugAssert(\n context.path!.length > 0,\n 'FieldValue.delete() at the top level should have already' +\n ' been handled.'\n );\n throw context.createError(\n 'FieldValue.delete() can only appear at the top level ' +\n 'of your update data'\n );\n } else {\n // We shouldn't encounter delete sentinels for queries or non-merge set() calls.\n throw context.createError(\n 'FieldValue.delete() cannot be used with set() unless you pass ' +\n '{merge:true}'\n );\n }\n return null;\n }\n\n isEqual(other: FieldValue): boolean {\n return other instanceof DeleteFieldValueImpl;\n }\n}\n\nexport class ServerTimestampFieldValueImpl extends FieldValueImpl {\n constructor() {\n super('FieldValue.serverTimestamp');\n }\n\n toFieldTransform(context: ParseContext): FieldTransform {\n return new FieldTransform(context.path!, ServerTimestampTransform.instance);\n }\n\n isEqual(other: FieldValue): boolean {\n return other instanceof ServerTimestampFieldValueImpl;\n }\n}\n\nexport class ArrayUnionFieldValueImpl extends FieldValueImpl {\n constructor(private readonly _elements: unknown[]) {\n super('FieldValue.arrayUnion');\n }\n\n toFieldTransform(context: ParseContext): FieldTransform {\n // Although array transforms are used with writes, the actual elements\n // being uniomed or removed are not considered writes since they cannot\n // contain any FieldValue sentinels, etc.\n const parseContext = new ParseContext(\n {\n dataSource: UserDataSource.Argument,\n 
methodName: this._methodName,\n arrayElement: true\n },\n context.databaseId,\n context.serializer,\n context.ignoreUndefinedProperties\n );\n const parsedElements = this._elements.map(\n element => parseData(element, parseContext)!\n );\n const arrayUnion = new ArrayUnionTransformOperation(parsedElements);\n return new FieldTransform(context.path!, arrayUnion);\n }\n\n isEqual(other: FieldValue): boolean {\n // TODO(mrschmidt): Implement isEquals\n return this === other;\n }\n}\n\nexport class ArrayRemoveFieldValueImpl extends FieldValueImpl {\n constructor(readonly _elements: unknown[]) {\n super('FieldValue.arrayRemove');\n }\n\n toFieldTransform(context: ParseContext): FieldTransform {\n // Although array transforms are used with writes, the actual elements\n // being unioned or removed are not considered writes since they cannot\n // contain any FieldValue sentinels, etc.\n const parseContext = new ParseContext(\n {\n dataSource: UserDataSource.Argument,\n methodName: this._methodName,\n arrayElement: true\n },\n context.databaseId,\n context.serializer,\n context.ignoreUndefinedProperties\n );\n const parsedElements = this._elements.map(\n element => parseData(element, parseContext)!\n );\n const arrayUnion = new ArrayRemoveTransformOperation(parsedElements);\n return new FieldTransform(context.path!, arrayUnion);\n }\n\n isEqual(other: FieldValue): boolean {\n // TODO(mrschmidt): Implement isEquals\n return this === other;\n }\n}\n\nexport class NumericIncrementFieldValueImpl extends FieldValueImpl {\n constructor(private readonly _operand: number) {\n super('FieldValue.increment');\n }\n\n toFieldTransform(context: ParseContext): FieldTransform {\n const parseContext = new ParseContext(\n {\n dataSource: UserDataSource.Argument,\n methodName: this._methodName\n },\n context.databaseId,\n context.serializer,\n context.ignoreUndefinedProperties\n );\n const operand = parseData(this._operand, parseContext)!;\n const numericIncrement = new NumericIncrementTransformOperation(\n context.serializer,\n operand\n );\n return new FieldTransform(context.path!, numericIncrement);\n }\n\n isEqual(other: FieldValue): boolean {\n // TODO(mrschmidt): Implement isEquals\n return this === other;\n }\n}\n\nexport class FieldValue implements firestore.FieldValue {\n static delete(): FieldValueImpl {\n validateNoArgs('FieldValue.delete', arguments);\n return new DeleteFieldValueImpl();\n }\n\n static serverTimestamp(): FieldValueImpl {\n validateNoArgs('FieldValue.serverTimestamp', arguments);\n return new ServerTimestampFieldValueImpl();\n }\n\n static arrayUnion(...elements: unknown[]): FieldValueImpl {\n validateAtLeastNumberOfArgs('FieldValue.arrayUnion', arguments, 1);\n // NOTE: We don't actually parse the data until it's used in set() or\n // update() since we need access to the Firestore instance.\n return new ArrayUnionFieldValueImpl(elements);\n }\n\n static arrayRemove(...elements: unknown[]): FieldValueImpl {\n validateAtLeastNumberOfArgs('FieldValue.arrayRemove', arguments, 1);\n // NOTE: We don't actually parse the data until it's used in set() or\n // update() since we need access to the Firestore instance.\n return new ArrayRemoveFieldValueImpl(elements);\n }\n\n static increment(n: number): FieldValueImpl {\n validateArgType('FieldValue.increment', 'number', 1, n);\n validateExactNumberOfArgs('FieldValue.increment', arguments, 1);\n return new NumericIncrementFieldValueImpl(n);\n }\n\n isEqual(other: FieldValue): boolean {\n return this === other;\n }\n}\n","/**\n * @license\n * 
Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Code, FirestoreError } from '../util/error';\nimport {\n validateArgType,\n validateExactNumberOfArgs\n} from '../util/input_validation';\nimport { primitiveComparator } from '../util/misc';\n\n/**\n * Immutable class representing a geo point as latitude-longitude pair.\n * This class is directly exposed in the public API, including its constructor.\n */\nexport class GeoPoint {\n // Prefix with underscore to signal this is a private variable in JS and\n // prevent it showing up for autocompletion when typing latitude or longitude.\n private _lat: number;\n private _long: number;\n\n constructor(latitude: number, longitude: number) {\n validateExactNumberOfArgs('GeoPoint', arguments, 2);\n validateArgType('GeoPoint', 'number', 1, latitude);\n validateArgType('GeoPoint', 'number', 2, longitude);\n if (!isFinite(latitude) || latitude < -90 || latitude > 90) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Latitude must be a number between -90 and 90, but was: ' + latitude\n );\n }\n if (!isFinite(longitude) || longitude < -180 || longitude > 180) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Longitude must be a number between -180 and 180, but was: ' + longitude\n );\n }\n\n this._lat = latitude;\n this._long = longitude;\n }\n\n /**\n * Returns the latitude of this geo point, a number between -90 and 90.\n */\n get latitude(): number {\n return this._lat;\n }\n\n /**\n * Returns the longitude of this geo point, a number between -180 and 180.\n */\n get longitude(): number {\n return this._long;\n }\n\n isEqual(other: GeoPoint): boolean {\n return this._lat === other._lat && this._long === other._long;\n }\n\n /**\n * Actually private to JS consumers of our API, so this function is prefixed\n * with an underscore.\n */\n _compareTo(other: GeoPoint): number {\n return (\n primitiveComparator(this._lat, other._lat) ||\n primitiveComparator(this._long, other._long)\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as firestore from '@firebase/firestore-types';\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { Timestamp } from './timestamp';\nimport { DatabaseId } from '../core/database_info';\nimport { DocumentKey } from '../model/document_key';\nimport {\n FieldMask,\n FieldTransform,\n Mutation,\n PatchMutation,\n Precondition,\n SetMutation,\n 
TransformMutation\n} from '../model/mutation';\nimport { FieldPath } from '../model/path';\nimport { debugAssert, fail } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { isPlainObject, valueDescription } from '../util/input_validation';\nimport { Dict, forEach, isEmpty } from '../util/obj';\nimport { ObjectValue, ObjectValueBuilder } from '../model/object_value';\nimport { JsonProtoSerializer } from '../remote/serializer';\nimport { Blob } from './blob';\nimport {\n FieldPath as ExternalFieldPath,\n fromDotSeparatedString\n} from './field_path';\nimport { DeleteFieldValueImpl, FieldValueImpl } from './field_value';\nimport { GeoPoint } from './geo_point';\nimport { PlatformSupport } from '../platform/platform';\nimport { DocumentReference } from './database';\n\nconst RESERVED_FIELD_REGEX = /^__.*__$/;\n\n/** The result of parsing document data (e.g. for a setData call). */\nexport class ParsedSetData {\n constructor(\n readonly data: ObjectValue,\n readonly fieldMask: FieldMask | null,\n readonly fieldTransforms: FieldTransform[]\n ) {}\n\n toMutations(key: DocumentKey, precondition: Precondition): Mutation[] {\n const mutations = [] as Mutation[];\n if (this.fieldMask !== null) {\n mutations.push(\n new PatchMutation(key, this.data, this.fieldMask, precondition)\n );\n } else {\n mutations.push(new SetMutation(key, this.data, precondition));\n }\n if (this.fieldTransforms.length > 0) {\n mutations.push(new TransformMutation(key, this.fieldTransforms));\n }\n return mutations;\n }\n}\n\n/** The result of parsing \"update\" data (i.e. for an updateData call). */\nexport class ParsedUpdateData {\n constructor(\n readonly data: ObjectValue,\n readonly fieldMask: FieldMask,\n readonly fieldTransforms: FieldTransform[]\n ) {}\n\n toMutations(key: DocumentKey, precondition: Precondition): Mutation[] {\n const mutations = [\n new PatchMutation(key, this.data, this.fieldMask, precondition)\n ] as Mutation[];\n if (this.fieldTransforms.length > 0) {\n mutations.push(new TransformMutation(key, this.fieldTransforms));\n }\n return mutations;\n }\n}\n\n/*\n * Represents what type of API method provided the data being parsed; useful\n * for determining which error conditions apply during parsing and providing\n * better error messages.\n */\nexport const enum UserDataSource {\n Set,\n Update,\n MergeSet,\n /**\n * Indicates the source is a where clause, cursor bound, arrayUnion()\n * element, etc. Of note, isWrite(source) will return false.\n */\n Argument,\n /**\n * Indicates that the source is an Argument that may directly contain nested\n * arrays (e.g. the operand of an `in` query).\n */\n ArrayArgument\n}\n\nfunction isWrite(dataSource: UserDataSource): boolean {\n switch (dataSource) {\n case UserDataSource.Set: // fall through\n case UserDataSource.MergeSet: // fall through\n case UserDataSource.Update:\n return true;\n case UserDataSource.Argument:\n case UserDataSource.ArrayArgument:\n return false;\n default:\n throw fail(`Unexpected case for UserDataSource: ${dataSource}`);\n }\n}\n\n/** Contains the settings that are mutated as we parse user data. */\ninterface ContextSettings {\n /** Indicates what kind of API method this data came from. */\n readonly dataSource: UserDataSource;\n /** The name of the method the user called to create the ParseContext. */\n readonly methodName: string;\n /**\n * A path within the object being parsed. 
This could be an empty path (in\n * which case the context represents the root of the data being parsed), or a\n * nonempty path (indicating the context represents a nested location within\n * the data).\n */\n readonly path?: FieldPath;\n /**\n * Whether or not this context corresponds to an element of an array.\n * If not set, elements are treated as if they were outside of arrays.\n */\n readonly arrayElement?: boolean;\n}\n\n/** A \"context\" object passed around while parsing user data. */\nexport class ParseContext {\n readonly fieldTransforms: FieldTransform[];\n readonly fieldMask: FieldPath[];\n /**\n * Initializes a ParseContext with the given source and path.\n *\n * @param settings The settings for the parser.\n * @param databaseId The database ID of the Firestore instance.\n * @param serializer The serializer to use to generate the Value proto.\n * @param ignoreUndefinedProperties Whether to ignore undefined properties\n * rather than throw.\n * @param fieldTransforms A mutable list of field transforms encountered while\n * parsing the data.\n * @param fieldMask A mutable list of field paths encountered while parsing\n * the data.\n *\n * TODO(b/34871131): We don't support array paths right now, so path can be\n * null to indicate the context represents any location within an array (in\n * which case certain features will not work and errors will be somewhat\n * compromised).\n */\n constructor(\n readonly settings: ContextSettings,\n readonly databaseId: DatabaseId,\n readonly serializer: JsonProtoSerializer,\n readonly ignoreUndefinedProperties: boolean,\n fieldTransforms?: FieldTransform[],\n fieldMask?: FieldPath[]\n ) {\n // Minor hack: If fieldTransforms is undefined, we assume this is an\n // external call and we need to validate the entire path.\n if (fieldTransforms === undefined) {\n this.validatePath();\n }\n this.fieldTransforms = fieldTransforms || [];\n this.fieldMask = fieldMask || [];\n }\n\n get path(): FieldPath | undefined {\n return this.settings.path;\n }\n\n get dataSource(): UserDataSource {\n return this.settings.dataSource;\n }\n\n /** Returns a new context with the specified settings overwritten. */\n contextWith(configuration: Partial<ContextSettings>): ParseContext {\n return new ParseContext(\n { ...this.settings, ...configuration },\n this.databaseId,\n this.serializer,\n this.ignoreUndefinedProperties,\n this.fieldTransforms,\n this.fieldMask\n );\n }\n\n childContextForField(field: string): ParseContext {\n const childPath = this.path?.child(field);\n const context = this.contextWith({ path: childPath, arrayElement: false });\n context.validatePathSegment(field);\n return context;\n }\n\n childContextForFieldPath(field: FieldPath): ParseContext {\n const childPath = this.path?.child(field);\n const context = this.contextWith({ path: childPath, arrayElement: false });\n context.validatePath();\n return context;\n }\n\n childContextForArray(index: number): ParseContext {\n // TODO(b/34871131): We don't support array paths right now; so make path\n // undefined.\n return this.contextWith({ path: undefined, arrayElement: true });\n }\n\n createError(reason: string): Error {\n const fieldDescription =\n !this.path || this.path.isEmpty()\n ? ''\n : ` (found in field ${this.path.toString()})`;\n return new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${this.settings.methodName}() called with invalid data. ` +\n reason +\n fieldDescription\n );\n }\n\n /** Returns 'true' if 'fieldPath' was traversed when creating this context. 
*/\n contains(fieldPath: FieldPath): boolean {\n return (\n this.fieldMask.find(field => fieldPath.isPrefixOf(field)) !== undefined ||\n this.fieldTransforms.find(transform =>\n fieldPath.isPrefixOf(transform.field)\n ) !== undefined\n );\n }\n\n private validatePath(): void {\n // TODO(b/34871131): Remove null check once we have proper paths for fields\n // within arrays.\n if (!this.path) {\n return;\n }\n for (let i = 0; i < this.path.length; i++) {\n this.validatePathSegment(this.path.get(i));\n }\n }\n\n private validatePathSegment(segment: string): void {\n if (segment.length === 0) {\n throw this.createError('Document fields must not be empty');\n }\n if (isWrite(this.dataSource) && RESERVED_FIELD_REGEX.test(segment)) {\n throw this.createError('Document fields cannot begin and end with \"__\"');\n }\n }\n}\n\n/**\n * Helper for parsing raw user input (provided via the API) into internal model\n * classes.\n */\nexport class UserDataReader {\n private readonly serializer: JsonProtoSerializer;\n\n constructor(\n private readonly databaseId: DatabaseId,\n private readonly ignoreUndefinedProperties: boolean,\n serializer?: JsonProtoSerializer\n ) {\n this.serializer =\n serializer || PlatformSupport.getPlatform().newSerializer(databaseId);\n }\n\n /** Parse document data from a non-merge set() call. */\n parseSetData(methodName: string, input: unknown): ParsedSetData {\n const context = this.createContext(UserDataSource.Set, methodName);\n validatePlainObject('Data must be an object, but it was:', context, input);\n const updateData = parseObject(input, context)!;\n\n return new ParsedSetData(\n new ObjectValue(updateData),\n /* fieldMask= */ null,\n context.fieldTransforms\n );\n }\n\n /** Parse document data from a set() call with '{merge:true}'. */\n parseMergeData(\n methodName: string,\n input: unknown,\n fieldPaths?: Array<string | firestore.FieldPath>\n ): ParsedSetData {\n const context = this.createContext(UserDataSource.MergeSet, methodName);\n validatePlainObject('Data must be an object, but it was:', context, input);\n const updateData = parseObject(input, context);\n\n let fieldMask: FieldMask;\n let fieldTransforms: FieldTransform[];\n\n if (!fieldPaths) {\n fieldMask = new FieldMask(context.fieldMask);\n fieldTransforms = context.fieldTransforms;\n } else {\n const validatedFieldPaths: FieldPath[] = [];\n\n for (const stringOrFieldPath of fieldPaths) {\n let fieldPath: FieldPath;\n\n if (stringOrFieldPath instanceof ExternalFieldPath) {\n fieldPath = stringOrFieldPath._internalPath;\n } else if (typeof stringOrFieldPath === 'string') {\n fieldPath = fieldPathFromDotSeparatedString(\n methodName,\n stringOrFieldPath\n );\n } else {\n throw fail(\n 'Expected stringOrFieldPath to be a string or a FieldPath'\n );\n }\n\n if (!context.contains(fieldPath)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Field '${fieldPath}' is specified in your field mask but missing from your input data.`\n );\n }\n\n if (!fieldMaskContains(validatedFieldPaths, fieldPath)) {\n validatedFieldPaths.push(fieldPath);\n }\n }\n\n fieldMask = new FieldMask(validatedFieldPaths);\n fieldTransforms = context.fieldTransforms.filter(transform =>\n fieldMask.covers(transform.field)\n );\n }\n return new ParsedSetData(\n new ObjectValue(updateData),\n fieldMask,\n fieldTransforms\n );\n }\n\n /** Parse update data from an update() call. 
*/\n parseUpdateData(methodName: string, input: unknown): ParsedUpdateData {\n const context = this.createContext(UserDataSource.Update, methodName);\n validatePlainObject('Data must be an object, but it was:', context, input);\n\n const fieldMaskPaths: FieldPath[] = [];\n const updateData = new ObjectValueBuilder();\n forEach(input as Dict<unknown>, (key, value) => {\n const path = fieldPathFromDotSeparatedString(methodName, key);\n\n const childContext = context.childContextForFieldPath(path);\n if (value instanceof DeleteFieldValueImpl) {\n // Add it to the field mask, but don't add anything to updateData.\n fieldMaskPaths.push(path);\n } else {\n const parsedValue = parseData(value, childContext);\n if (parsedValue != null) {\n fieldMaskPaths.push(path);\n updateData.set(path, parsedValue);\n }\n }\n });\n\n const mask = new FieldMask(fieldMaskPaths);\n return new ParsedUpdateData(\n updateData.build(),\n mask,\n context.fieldTransforms\n );\n }\n\n /** Parse update data from a list of field/value arguments. */\n parseUpdateVarargs(\n methodName: string,\n field: string | ExternalFieldPath,\n value: unknown,\n moreFieldsAndValues: unknown[]\n ): ParsedUpdateData {\n const context = this.createContext(UserDataSource.Update, methodName);\n const keys = [fieldPathFromArgument(methodName, field)];\n const values = [value];\n\n if (moreFieldsAndValues.length % 2 !== 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${methodName}() needs to be called with an even number ` +\n 'of arguments that alternate between field names and values.'\n );\n }\n\n for (let i = 0; i < moreFieldsAndValues.length; i += 2) {\n keys.push(\n fieldPathFromArgument(\n methodName,\n moreFieldsAndValues[i] as string | ExternalFieldPath\n )\n );\n values.push(moreFieldsAndValues[i + 1]);\n }\n\n const fieldMaskPaths: FieldPath[] = [];\n const updateData = new ObjectValueBuilder();\n\n // We iterate in reverse order to pick the last value for a field if the\n // user specified the field multiple times.\n for (let i = keys.length - 1; i >= 0; --i) {\n if (!fieldMaskContains(fieldMaskPaths, keys[i])) {\n const path = keys[i];\n const value = values[i];\n const childContext = context.childContextForFieldPath(path);\n if (value instanceof DeleteFieldValueImpl) {\n // Add it to the field mask, but don't add anything to updateData.\n fieldMaskPaths.push(path);\n } else {\n const parsedValue = parseData(value, childContext);\n if (parsedValue != null) {\n fieldMaskPaths.push(path);\n updateData.set(path, parsedValue);\n }\n }\n }\n }\n\n const mask = new FieldMask(fieldMaskPaths);\n return new ParsedUpdateData(\n updateData.build(),\n mask,\n context.fieldTransforms\n );\n }\n\n /** Creates a new top-level parse context. */\n private createContext(\n dataSource: UserDataSource,\n methodName: string\n ): ParseContext {\n return new ParseContext(\n {\n dataSource,\n methodName,\n path: FieldPath.EMPTY_PATH,\n arrayElement: false\n },\n this.databaseId,\n this.serializer,\n this.ignoreUndefinedProperties\n );\n }\n\n /**\n * Parse a \"query value\" (e.g. value in a where filter or a value in a cursor\n * bound).\n *\n * @param allowArrays Whether the query value is an array that may directly\n * contain additional arrays (e.g. the operand of an `in` query).\n */\n parseQueryValue(\n methodName: string,\n input: unknown,\n allowArrays = false\n ): api.Value {\n const context = this.createContext(\n allowArrays ? 
UserDataSource.ArrayArgument : UserDataSource.Argument,\n methodName\n );\n const parsed = parseData(input, context);\n debugAssert(parsed != null, 'Parsed data should not be null.');\n debugAssert(\n context.fieldTransforms.length === 0,\n 'Field transforms should have been disallowed.'\n );\n return parsed;\n }\n}\n\n/**\n * Parses user data to Protobuf Values.\n *\n * @param input Data to be parsed.\n * @param context A context object representing the current path being parsed,\n * the source of the data being parsed, etc.\n * @return The parsed value, or null if the value was a FieldValue sentinel\n * that should not be included in the resulting parsed data.\n */\nexport function parseData(\n input: unknown,\n context: ParseContext\n): api.Value | null {\n if (looksLikeJsonObject(input)) {\n validatePlainObject('Unsupported field value:', context, input);\n return parseObject(input, context);\n } else if (input instanceof FieldValueImpl) {\n // FieldValues usually parse into transforms (except FieldValue.delete())\n // in which case we do not want to include this field in our parsed data\n // (as doing so will overwrite the field directly prior to the transform\n // trying to transform it). So we don't add this location to\n // context.fieldMask and we return null as our parsing result.\n parseSentinelFieldValue(input, context);\n return null;\n } else {\n // If context.path is null we are inside an array and we don't support\n // field mask paths more granular than the top-level array.\n if (context.path) {\n context.fieldMask.push(context.path);\n }\n\n if (input instanceof Array) {\n // TODO(b/34871131): Include the path containing the array in the error\n // message.\n // In the case of IN queries, the parsed data is an array (representing\n // the set of values to be included for the IN query) that may directly\n // contain additional arrays (each representing an individual field\n // value), so we disable this validation.\n if (\n context.settings.arrayElement &&\n context.dataSource !== UserDataSource.ArrayArgument\n ) {\n throw context.createError('Nested arrays are not supported');\n }\n return parseArray(input as unknown[], context);\n } else {\n return parseScalarValue(input, context);\n }\n }\n}\n\nfunction parseObject(\n obj: Dict<unknown>,\n context: ParseContext\n): { mapValue: api.MapValue } {\n const fields: Dict<api.Value> = {};\n\n if (isEmpty(obj)) {\n // If we encounter an empty object, we explicitly add it to the update\n // mask to ensure that the server creates a map entry.\n if (context.path && context.path.length > 0) {\n context.fieldMask.push(context.path);\n }\n } else {\n forEach(obj, (key: string, val: unknown) => {\n const parsedValue = parseData(val, context.childContextForField(key));\n if (parsedValue != null) {\n fields[key] = parsedValue;\n }\n });\n }\n\n return { mapValue: { fields } };\n}\n\nfunction parseArray(array: unknown[], context: ParseContext): api.Value {\n const values: api.Value[] = [];\n let entryIndex = 0;\n for (const entry of array) {\n let parsedEntry = parseData(\n entry,\n context.childContextForArray(entryIndex)\n );\n if (parsedEntry == null) {\n // Just include nulls in the array for fields being replaced with a\n // sentinel.\n parsedEntry = { nullValue: 'NULL_VALUE' };\n }\n values.push(parsedEntry);\n entryIndex++;\n }\n return { arrayValue: { values } };\n}\n\n/**\n * \"Parses\" the provided FieldValueImpl, adding any necessary transforms to\n * context.fieldTransforms.\n */\nfunction parseSentinelFieldValue(\n value: 
FieldValueImpl,\n context: ParseContext\n): void {\n // Sentinels are only supported with writes, and not within arrays.\n if (!isWrite(context.dataSource)) {\n throw context.createError(\n `${value._methodName}() can only be used with update() and set()`\n );\n }\n if (context.path === null) {\n throw context.createError(\n `${value._methodName}() is not currently supported inside arrays`\n );\n }\n\n const fieldTransform = value.toFieldTransform(context);\n if (fieldTransform) {\n context.fieldTransforms.push(fieldTransform);\n }\n}\n\n/**\n * Helper to parse a scalar value (i.e. not an Object, Array, or FieldValue)\n *\n * @return The parsed value\n */\nfunction parseScalarValue(\n value: unknown,\n context: ParseContext\n): api.Value | null {\n if (value === null) {\n return { nullValue: 'NULL_VALUE' };\n } else if (typeof value === 'number') {\n return context.serializer.toNumber(value);\n } else if (typeof value === 'boolean') {\n return { booleanValue: value };\n } else if (typeof value === 'string') {\n return { stringValue: value };\n } else if (value instanceof Date) {\n const timestamp = Timestamp.fromDate(value);\n return { timestampValue: context.serializer.toTimestamp(timestamp) };\n } else if (value instanceof Timestamp) {\n // Firestore backend truncates precision down to microseconds. To ensure\n // offline mode works the same with regards to truncation, perform the\n // truncation immediately without waiting for the backend to do that.\n const timestamp = new Timestamp(\n value.seconds,\n Math.floor(value.nanoseconds / 1000) * 1000\n );\n return { timestampValue: context.serializer.toTimestamp(timestamp) };\n } else if (value instanceof GeoPoint) {\n return {\n geoPointValue: {\n latitude: value.latitude,\n longitude: value.longitude\n }\n };\n } else if (value instanceof Blob) {\n return { bytesValue: context.serializer.toBytes(value) };\n } else if (value instanceof DocumentReference) {\n const thisDb = context.databaseId;\n const otherDb = value.firestore._databaseId;\n if (!otherDb.isEqual(thisDb)) {\n throw context.createError(\n 'Document reference is for database ' +\n `${otherDb.projectId}/${otherDb.database} but should be ` +\n `for database ${thisDb.projectId}/${thisDb.database}`\n );\n }\n return {\n referenceValue: context.serializer.toResourceName(\n value._key.path,\n value.firestore._databaseId\n )\n };\n } else if (value === undefined && context.ignoreUndefinedProperties) {\n return null;\n } else {\n throw context.createError(\n `Unsupported field value: ${valueDescription(value)}`\n );\n }\n}\n\n/**\n * Checks whether an object looks like a JSON object that should be converted\n * into a struct. Normal class/prototype instances are considered to look like\n * JSON objects since they should be converted to a struct value. Arrays, Dates,\n * GeoPoints, etc. 
are not considered to look like JSON objects since they map\n * to specific FieldValue types other than ObjectValue.\n */\nfunction looksLikeJsonObject(input: unknown): boolean {\n return (\n typeof input === 'object' &&\n input !== null &&\n !(input instanceof Array) &&\n !(input instanceof Date) &&\n !(input instanceof Timestamp) &&\n !(input instanceof GeoPoint) &&\n !(input instanceof Blob) &&\n !(input instanceof DocumentReference) &&\n !(input instanceof FieldValueImpl)\n );\n}\n\nfunction validatePlainObject(\n message: string,\n context: ParseContext,\n input: unknown\n): asserts input is Dict<unknown> {\n if (!looksLikeJsonObject(input) || !isPlainObject(input)) {\n const description = valueDescription(input);\n if (description === 'an object') {\n // Massage the error if it was an object.\n throw context.createError(message + ' a custom object');\n } else {\n throw context.createError(message + ' ' + description);\n }\n }\n}\n\n/**\n * Helper that calls fromDotSeparatedString() but wraps any error thrown.\n */\nexport function fieldPathFromArgument(\n methodName: string,\n path: string | ExternalFieldPath\n): FieldPath {\n if (path instanceof ExternalFieldPath) {\n return path._internalPath;\n } else if (typeof path === 'string') {\n return fieldPathFromDotSeparatedString(methodName, path);\n } else {\n const message = 'Field path arguments must be of type string or FieldPath.';\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${methodName}() called with invalid data. ${message}`\n );\n }\n}\n\n/**\n * Wraps fromDotSeparatedString with an error message about the method that\n * was thrown.\n * @param methodName The publicly visible method name\n * @param path The dot-separated string form of a field path which will be split\n * on dots.\n */\nfunction fieldPathFromDotSeparatedString(\n methodName: string,\n path: string\n): FieldPath {\n try {\n return fromDotSeparatedString(path)._internalPath;\n } catch (e) {\n const message = errorMessage(e);\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function ${methodName}() called with invalid data. ${message}`\n );\n }\n}\n\n/**\n * Extracts the message from a caught exception, which should be an Error object\n * though JS doesn't guarantee that.\n */\nfunction errorMessage(error: Error | object): string {\n return error instanceof Error ? error.message : error.toString();\n}\n\n/** Checks `haystack` if FieldPath `needle` is present. Runs in O(n). 
*/\nfunction fieldMaskContains(haystack: FieldPath[], needle: FieldPath): boolean {\n return haystack.some(v => v.isEqual(needle));\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { CredentialsProvider, Token } from '../api/credentials';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { TargetId } from '../core/types';\nimport { TargetData } from '../local/target_data';\nimport { Mutation, MutationResult } from '../model/mutation';\nimport * as api from '../protos/firestore_proto_api';\nimport { hardAssert, debugAssert } from '../util/assert';\nimport { AsyncQueue, DelayedOperation, TimerId } from '../util/async_queue';\nimport { Code, FirestoreError } from '../util/error';\nimport { logError, logDebug } from '../util/log';\n\nimport { isNullOrUndefined } from '../util/types';\nimport { ExponentialBackoff } from './backoff';\nimport { Connection, Stream } from './connection';\nimport { JsonProtoSerializer } from './serializer';\nimport { WatchChange } from './watch_change';\nimport { ByteString } from '../util/byte_string';\n\nconst LOG_TAG = 'PersistentStream';\n\n// The generated proto interfaces for these class are missing the database\n// field. So we add it here.\n// TODO(b/36015800): Remove this once the api generator is fixed.\ninterface ListenRequest extends api.ListenRequest {\n database?: string;\n}\nexport interface WriteRequest extends api.WriteRequest {\n database?: string;\n}\n/**\n * PersistentStream can be in one of 5 states (each described in detail below)\n * based on the following state transition diagram:\n *\n * start() called auth & connection succeeded\n * INITIAL ----------------> STARTING -----------------------------> OPEN\n * ^ | |\n * | | error occurred |\n * | \\-----------------------------v-----/\n * | |\n * backoff | |\n * elapsed | start() called |\n * \\--- BACKOFF <---------------- ERROR\n *\n * [any state] --------------------------> INITIAL\n * stop() called or\n * idle timer expired\n */\nconst enum PersistentStreamState {\n /**\n * The streaming RPC is not yet running and there's no error condition.\n * Calling start() will start the stream immediately without backoff.\n * While in this state isStarted() will return false.\n */\n Initial,\n\n /**\n * The stream is starting, either waiting for an auth token or for the stream\n * to successfully open. While in this state, isStarted() will return true but\n * isOpen() will return false.\n */\n Starting,\n\n /**\n * The streaming RPC is up and running. Requests and responses can flow\n * freely. Both isStarted() and isOpen() will return true.\n */\n Open,\n\n /**\n * The stream encountered an error. The next start attempt will back off.\n * While in this state isStarted() will return false.\n */\n Error,\n\n /**\n * An in-between state after an error where the stream is waiting before\n * re-starting. 
After waiting is complete, the stream will try to open.\n * While in this state isStarted() will return true but isOpen() will return\n * false.\n */\n Backoff\n}\n\n/**\n * Provides a common interface that is shared by the listeners for stream\n * events by the concrete implementation classes.\n */\nexport interface PersistentStreamListener {\n /**\n * Called after the stream was established and can accept outgoing\n * messages\n */\n onOpen: () => Promise<void>;\n /**\n * Called after the stream has closed. If there was an error, the\n * FirestoreError will be set.\n */\n onClose: (err?: FirestoreError) => Promise<void>;\n}\n\n/** The time a stream stays open after it is marked idle. */\nconst IDLE_TIMEOUT_MS = 60 * 1000;\n\n/**\n * A PersistentStream is an abstract base class that represents a streaming RPC\n * to the Firestore backend. It's built on top of the connections own support\n * for streaming RPCs, and adds several critical features for our clients:\n *\n * - Exponential backoff on failure\n * - Authentication via CredentialsProvider\n * - Dispatching all callbacks into the shared worker queue\n * - Closing idle streams after 60 seconds of inactivity\n *\n * Subclasses of PersistentStream implement serialization of models to and\n * from the JSON representation of the protocol buffers for a specific\n * streaming RPC.\n *\n * ## Starting and Stopping\n *\n * Streaming RPCs are stateful and need to be start()ed before messages can\n * be sent and received. The PersistentStream will call the onOpen() function\n * of the listener once the stream is ready to accept requests.\n *\n * Should a start() fail, PersistentStream will call the registered onClose()\n * listener with a FirestoreError indicating what went wrong.\n *\n * A PersistentStream can be started and stopped repeatedly.\n *\n * Generic types:\n * SendType: The type of the outgoing message of the underlying\n * connection stream\n * ReceiveType: The type of the incoming message of the underlying\n * connection stream\n * ListenerType: The type of the listener that will be used for callbacks\n */\nexport abstract class PersistentStream<\n SendType,\n ReceiveType,\n ListenerType extends PersistentStreamListener\n> {\n private state = PersistentStreamState.Initial;\n /**\n * A close count that's incremented every time the stream is closed; used by\n * getCloseGuardedDispatcher() to invalidate callbacks that happen after\n * close.\n */\n private closeCount = 0;\n\n private idleTimer: DelayedOperation<void> | null = null;\n private stream: Stream<SendType, ReceiveType> | null = null;\n\n protected backoff: ExponentialBackoff;\n\n constructor(\n private queue: AsyncQueue,\n connectionTimerId: TimerId,\n private idleTimerId: TimerId,\n protected connection: Connection,\n private credentialsProvider: CredentialsProvider,\n protected listener: ListenerType\n ) {\n this.backoff = new ExponentialBackoff(queue, connectionTimerId);\n }\n\n /**\n * Returns true if start() has been called and no error has occurred. True\n * indicates the stream is open or in the process of opening (which\n * encompasses respecting backoff, getting auth tokens, and starting the\n * actual RPC). 
Use isOpen() to determine if the stream is open and ready for\n * outbound requests.\n */\n isStarted(): boolean {\n return (\n this.state === PersistentStreamState.Starting ||\n this.state === PersistentStreamState.Open ||\n this.state === PersistentStreamState.Backoff\n );\n }\n\n /**\n * Returns true if the underlying RPC is open (the onOpen() listener has been\n * called) and the stream is ready for outbound requests.\n */\n isOpen(): boolean {\n return this.state === PersistentStreamState.Open;\n }\n\n /**\n * Starts the RPC. Only allowed if isStarted() returns false. The stream is\n * not immediately ready for use: onOpen() will be invoked when the RPC is\n * ready for outbound requests, at which point isOpen() will return true.\n *\n * When start returns, isStarted() will return true.\n */\n start(): void {\n if (this.state === PersistentStreamState.Error) {\n this.performBackoff();\n return;\n }\n\n debugAssert(\n this.state === PersistentStreamState.Initial,\n 'Already started'\n );\n this.auth();\n }\n\n /**\n * Stops the RPC. This call is idempotent and allowed regardless of the\n * current isStarted() state.\n *\n * When stop returns, isStarted() and isOpen() will both return false.\n */\n async stop(): Promise<void> {\n if (this.isStarted()) {\n await this.close(PersistentStreamState.Initial);\n }\n }\n\n /**\n * After an error the stream will usually back off on the next attempt to\n * start it. If the error warrants an immediate restart of the stream, the\n * sender can use this to indicate that the receiver should not back off.\n *\n * Each error will call the onClose() listener. That function can decide to\n * inhibit backoff if required.\n */\n inhibitBackoff(): void {\n debugAssert(\n !this.isStarted(),\n 'Can only inhibit backoff in a stopped state'\n );\n\n this.state = PersistentStreamState.Initial;\n this.backoff.reset();\n }\n\n /**\n * Marks this stream as idle. If no further actions are performed on the\n * stream for one minute, the stream will automatically close itself and\n * notify the stream's onClose() handler with Status.OK. The stream will then\n * be in a !isStarted() state, requiring the caller to start the stream again\n * before further use.\n *\n * Only streams that are in state 'Open' can be marked idle, as all other\n * states imply pending network operations.\n */\n markIdle(): void {\n // Starts the idle time if we are in state 'Open' and are not yet already\n // running a timer (in which case the previous idle timeout still applies).\n if (this.isOpen() && this.idleTimer === null) {\n this.idleTimer = this.queue.enqueueAfterDelay(\n this.idleTimerId,\n IDLE_TIMEOUT_MS,\n () => this.handleIdleCloseTimer()\n );\n }\n }\n\n /** Sends a message to the underlying stream. */\n protected sendRequest(msg: SendType): void {\n this.cancelIdleCheck();\n this.stream!.send(msg);\n }\n\n /** Called by the idle timer when the stream should close due to inactivity. */\n private async handleIdleCloseTimer(): Promise<void> {\n if (this.isOpen()) {\n // When timing out an idle stream there's no reason to force the stream into backoff when\n // it restarts so set the stream state to Initial instead of Error.\n return this.close(PersistentStreamState.Initial);\n }\n }\n\n /** Marks the stream as active again. 
*/\n private cancelIdleCheck(): void {\n if (this.idleTimer) {\n this.idleTimer.cancel();\n this.idleTimer = null;\n }\n }\n\n /**\n * Closes the stream and cleans up as necessary:\n *\n * * closes the underlying GRPC stream;\n * * calls the onClose handler with the given 'error';\n * * sets internal stream state to 'finalState';\n * * adjusts the backoff timer based on the error\n *\n * A new stream can be opened by calling start().\n *\n * @param finalState the intended state of the stream after closing.\n * @param error the error the connection was closed with.\n */\n private async close(\n finalState: PersistentStreamState,\n error?: FirestoreError\n ): Promise<void> {\n debugAssert(this.isStarted(), 'Only started streams should be closed.');\n debugAssert(\n finalState === PersistentStreamState.Error || isNullOrUndefined(error),\n \"Can't provide an error when not in an error state.\"\n );\n\n // Cancel any outstanding timers (they're guaranteed not to execute).\n this.cancelIdleCheck();\n this.backoff.cancel();\n\n // Invalidates any stream-related callbacks (e.g. from auth or the\n // underlying stream), guaranteeing they won't execute.\n this.closeCount++;\n\n if (finalState !== PersistentStreamState.Error) {\n // If this is an intentional close ensure we don't delay our next connection attempt.\n this.backoff.reset();\n } else if (error && error.code === Code.RESOURCE_EXHAUSTED) {\n // Log the error. (Probably either 'quota exceeded' or 'max queue length reached'.)\n logError(error.toString());\n logError(\n 'Using maximum backoff delay to prevent overloading the backend.'\n );\n this.backoff.resetToMax();\n } else if (error && error.code === Code.UNAUTHENTICATED) {\n // \"unauthenticated\" error means the token was rejected. Try force refreshing it in case it\n // just expired.\n this.credentialsProvider.invalidateToken();\n }\n\n // Clean up the underlying stream because we are no longer interested in events.\n if (this.stream !== null) {\n this.tearDown();\n this.stream.close();\n this.stream = null;\n }\n\n // This state must be assigned before calling onClose() to allow the callback to\n // inhibit backoff or otherwise manipulate the state in its non-started state.\n this.state = finalState;\n\n // Notify the listener that the stream closed.\n await this.listener.onClose(error);\n }\n\n /**\n * Can be overridden to perform additional cleanup before the stream is closed.\n * Calling super.tearDown() is not required.\n */\n protected tearDown(): void {}\n\n /**\n * Used by subclasses to start the concrete RPC and return the underlying\n * connection stream.\n */\n protected abstract startRpc(\n token: Token | null\n ): Stream<SendType, ReceiveType>;\n\n /**\n * Called after the stream has received a message. 
The function will be\n * called on the right queue and must return a Promise.\n * @param message The message received from the stream.\n */\n protected abstract onMessage(message: ReceiveType): Promise<void>;\n\n private auth(): void {\n debugAssert(\n this.state === PersistentStreamState.Initial,\n 'Must be in initial state to auth'\n );\n\n this.state = PersistentStreamState.Starting;\n\n const dispatchIfNotClosed = this.getCloseGuardedDispatcher(this.closeCount);\n\n // TODO(mikelehen): Just use dispatchIfNotClosed, but see TODO below.\n const closeCount = this.closeCount;\n\n this.credentialsProvider.getToken().then(\n token => {\n // Stream can be stopped while waiting for authentication.\n // TODO(mikelehen): We really should just use dispatchIfNotClosed\n // and let this dispatch onto the queue, but that opened a spec test can\n // of worms that I don't want to deal with in this PR.\n if (this.closeCount === closeCount) {\n // Normally we'd have to schedule the callback on the AsyncQueue.\n // However, the following calls are safe to be called outside the\n // AsyncQueue since they don't chain asynchronous calls\n this.startStream(token);\n }\n },\n (error: Error) => {\n dispatchIfNotClosed(() => {\n const rpcError = new FirestoreError(\n Code.UNKNOWN,\n 'Fetching auth token failed: ' + error.message\n );\n return this.handleStreamClose(rpcError);\n });\n }\n );\n }\n\n private startStream(token: Token | null): void {\n debugAssert(\n this.state === PersistentStreamState.Starting,\n 'Trying to start stream in a non-starting state'\n );\n\n const dispatchIfNotClosed = this.getCloseGuardedDispatcher(this.closeCount);\n\n this.stream = this.startRpc(token);\n this.stream.onOpen(() => {\n dispatchIfNotClosed(() => {\n debugAssert(\n this.state === PersistentStreamState.Starting,\n 'Expected stream to be in state Starting, but was ' + this.state\n );\n this.state = PersistentStreamState.Open;\n return this.listener!.onOpen();\n });\n });\n this.stream.onClose((error?: FirestoreError) => {\n dispatchIfNotClosed(() => {\n return this.handleStreamClose(error);\n });\n });\n this.stream.onMessage((msg: ReceiveType) => {\n dispatchIfNotClosed(() => {\n return this.onMessage(msg);\n });\n });\n }\n\n private performBackoff(): void {\n debugAssert(\n this.state === PersistentStreamState.Error,\n 'Should only perform backoff when in Error state'\n );\n this.state = PersistentStreamState.Backoff;\n\n this.backoff.backoffAndRun(async () => {\n debugAssert(\n this.state === PersistentStreamState.Backoff,\n 'Backoff elapsed but state is now: ' + this.state\n );\n\n this.state = PersistentStreamState.Initial;\n this.start();\n debugAssert(this.isStarted(), 'PersistentStream should have started');\n });\n }\n\n // Visible for tests\n handleStreamClose(error?: FirestoreError): Promise<void> {\n debugAssert(\n this.isStarted(),\n \"Can't handle server close on non-started stream\"\n );\n logDebug(LOG_TAG, `close with error: ${error}`);\n\n this.stream = null;\n\n // In theory the stream could close cleanly, however, in our current model\n // we never expect this to happen because if we stop a stream ourselves,\n // this callback will never be called. To prevent cases where we retry\n // without a backoff accidentally, we set the stream to error in all cases.\n return this.close(PersistentStreamState.Error, error);\n }\n\n /**\n * Returns a \"dispatcher\" function that dispatches operations onto the\n * AsyncQueue but only runs them if closeCount remains unchanged. 
This allows\n * us to turn auth / stream callbacks into no-ops if the stream is closed /\n * re-opened, etc.\n */\n private getCloseGuardedDispatcher(\n startCloseCount: number\n ): (fn: () => Promise<void>) => void {\n return (fn: () => Promise<void>): void => {\n this.queue.enqueueAndForget(() => {\n if (this.closeCount === startCloseCount) {\n return fn();\n } else {\n logDebug(\n LOG_TAG,\n 'stream callback skipped by getCloseGuardedDispatcher.'\n );\n return Promise.resolve();\n }\n });\n };\n }\n}\n\n/** Listener for the PersistentWatchStream */\nexport interface WatchStreamListener extends PersistentStreamListener {\n /**\n * Called on a watchChange. The snapshot parameter will be MIN if the watch\n * change did not have a snapshot associated with it.\n */\n onWatchChange: (\n watchChange: WatchChange,\n snapshot: SnapshotVersion\n ) => Promise<void>;\n}\n\n/**\n * A PersistentStream that implements the Listen RPC.\n *\n * Once the Listen stream has called the onOpen() listener, any number of\n * listen() and unlisten() calls can be made to control what changes will be\n * sent from the server for ListenResponses.\n */\nexport class PersistentListenStream extends PersistentStream<\n api.ListenRequest,\n api.ListenResponse,\n WatchStreamListener\n> {\n constructor(\n queue: AsyncQueue,\n connection: Connection,\n credentials: CredentialsProvider,\n private serializer: JsonProtoSerializer,\n listener: WatchStreamListener\n ) {\n super(\n queue,\n TimerId.ListenStreamConnectionBackoff,\n TimerId.ListenStreamIdle,\n connection,\n credentials,\n listener\n );\n }\n\n protected startRpc(\n token: Token | null\n ): Stream<api.ListenRequest, api.ListenResponse> {\n return this.connection.openStream<api.ListenRequest, api.ListenResponse>(\n 'Listen',\n token\n );\n }\n\n protected onMessage(watchChangeProto: api.ListenResponse): Promise<void> {\n // A successful response means the stream is healthy\n this.backoff.reset();\n\n const watchChange = this.serializer.fromWatchChange(watchChangeProto);\n const snapshot = this.serializer.versionFromListenResponse(\n watchChangeProto\n );\n return this.listener!.onWatchChange(watchChange, snapshot);\n }\n\n /**\n * Registers interest in the results of the given target. If the target\n * includes a resumeToken it will be included in the request. 
Results that\n * affect the target will be streamed back as WatchChange messages that\n * reference the targetId.\n */\n watch(targetData: TargetData): void {\n const request: ListenRequest = {};\n request.database = this.serializer.encodedDatabaseId;\n request.addTarget = this.serializer.toTarget(targetData);\n\n const labels = this.serializer.toListenRequestLabels(targetData);\n if (labels) {\n request.labels = labels;\n }\n\n this.sendRequest(request);\n }\n\n /**\n * Unregisters interest in the results of the target associated with the\n * given targetId.\n */\n unwatch(targetId: TargetId): void {\n const request: ListenRequest = {};\n request.database = this.serializer.encodedDatabaseId;\n request.removeTarget = targetId;\n this.sendRequest(request);\n }\n}\n\n/** Listener for the PersistentWriteStream */\nexport interface WriteStreamListener extends PersistentStreamListener {\n /**\n * Called by the PersistentWriteStream upon a successful handshake response\n * from the server, which is the receiver's cue to send any pending writes.\n */\n onHandshakeComplete: () => Promise<void>;\n\n /**\n * Called by the PersistentWriteStream upon receiving a StreamingWriteResponse\n * from the server that contains a mutation result.\n */\n onMutationResult: (\n commitVersion: SnapshotVersion,\n results: MutationResult[]\n ) => Promise<void>;\n}\n\n/**\n * A Stream that implements the Write RPC.\n *\n * The Write RPC requires the caller to maintain special streamToken\n * state in between calls, to help the server understand which responses the\n * client has processed by the time the next request is made. Every response\n * will contain a streamToken; this value must be passed to the next\n * request.\n *\n * After calling start() on this stream, the next request must be a handshake,\n * containing whatever streamToken is on hand. Once a response to this\n * request is received, all pending mutations may be submitted. When\n * submitting multiple batches of mutations at the same time, it's\n * okay to use the same streamToken for the calls to writeMutations.\n *\n * TODO(b/33271235): Use proto types\n */\nexport class PersistentWriteStream extends PersistentStream<\n api.WriteRequest,\n api.WriteResponse,\n WriteStreamListener\n> {\n private handshakeComplete_ = false;\n\n constructor(\n queue: AsyncQueue,\n connection: Connection,\n credentials: CredentialsProvider,\n private serializer: JsonProtoSerializer,\n listener: WriteStreamListener\n ) {\n super(\n queue,\n TimerId.WriteStreamConnectionBackoff,\n TimerId.WriteStreamIdle,\n connection,\n credentials,\n listener\n );\n }\n\n /**\n * The last received stream token from the server, used to acknowledge which\n * responses the client has processed. 
Stream tokens are opaque checkpoint\n * markers whose only real value is their inclusion in the next request.\n *\n * PersistentWriteStream manages propagating this value from responses to the\n * next request.\n */\n lastStreamToken: ByteString = ByteString.EMPTY_BYTE_STRING;\n\n /**\n * Tracks whether or not a handshake has been successfully exchanged and\n * the stream is ready to accept mutations.\n */\n get handshakeComplete(): boolean {\n return this.handshakeComplete_;\n }\n\n // Override of PersistentStream.start\n start(): void {\n this.handshakeComplete_ = false;\n super.start();\n }\n\n protected tearDown(): void {\n if (this.handshakeComplete_) {\n this.writeMutations([]);\n }\n }\n\n protected startRpc(\n token: Token | null\n ): Stream<api.WriteRequest, api.WriteResponse> {\n return this.connection.openStream<api.WriteRequest, api.WriteResponse>(\n 'Write',\n token\n );\n }\n\n protected onMessage(responseProto: api.WriteResponse): Promise<void> {\n // Always capture the last stream token.\n hardAssert(\n !!responseProto.streamToken,\n 'Got a write response without a stream token'\n );\n this.lastStreamToken = this.serializer.fromBytes(responseProto.streamToken);\n\n if (!this.handshakeComplete_) {\n // The first response is always the handshake response\n hardAssert(\n !responseProto.writeResults || responseProto.writeResults.length === 0,\n 'Got mutation results for handshake'\n );\n this.handshakeComplete_ = true;\n return this.listener!.onHandshakeComplete();\n } else {\n // A successful first write response means the stream is healthy,\n // Note, that we could consider a successful handshake healthy, however,\n // the write itself might be causing an error we want to back off from.\n this.backoff.reset();\n\n const results = this.serializer.fromWriteResults(\n responseProto.writeResults,\n responseProto.commitTime\n );\n const commitVersion = this.serializer.fromVersion(\n responseProto.commitTime!\n );\n return this.listener!.onMutationResult(commitVersion, results);\n }\n }\n\n /**\n * Sends an initial streamToken to the server, performing the handshake\n * required to make the StreamingWrite RPC work. Subsequent\n * calls should wait until onHandshakeComplete was called.\n */\n writeHandshake(): void {\n debugAssert(this.isOpen(), 'Writing handshake requires an opened stream');\n debugAssert(!this.handshakeComplete_, 'Handshake already completed');\n // TODO(dimond): Support stream resumption. We intentionally do not set the\n // stream token on the handshake, ignoring any stream token we might have.\n const request: WriteRequest = {};\n request.database = this.serializer.encodedDatabaseId;\n this.sendRequest(request);\n }\n\n /** Sends a group of mutations to the Firestore backend to apply. 
*/\n writeMutations(mutations: Mutation[]): void {\n debugAssert(this.isOpen(), 'Writing mutations requires an opened stream');\n debugAssert(\n this.handshakeComplete_,\n 'Handshake must be complete before writing mutations'\n );\n debugAssert(\n this.lastStreamToken.approximateByteSize() > 0,\n 'Trying to write mutation without a token'\n );\n\n const request: WriteRequest = {\n streamToken: this.serializer.toBytes(this.lastStreamToken),\n writes: mutations.map(mutation => this.serializer.toMutation(mutation))\n };\n\n this.sendRequest(request);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { CredentialsProvider } from '../api/credentials';\nimport { MaybeDocument, Document } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { Mutation, MutationResult } from '../model/mutation';\nimport * as api from '../protos/firestore_proto_api';\nimport { debugCast, hardAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { Connection } from './connection';\nimport { JsonProtoSerializer } from './serializer';\nimport {\n PersistentListenStream,\n PersistentWriteStream,\n WatchStreamListener,\n WriteStreamListener\n} from './persistent_stream';\nimport { AsyncQueue } from '../util/async_queue';\nimport { Query } from '../core/query';\n\n/**\n * Datastore and its related methods are a wrapper around the external Google\n * Cloud Datastore grpc API, which provides an interface that is more convenient\n * for the rest of the client SDK architecture to consume.\n */\nexport class Datastore {\n // Make sure that the structural type of `Datastore` is unique.\n // See https://github.com/microsoft/TypeScript/issues/5451\n private _ = undefined;\n}\n\n/**\n * An implementation of Datastore that exposes additional state for internal\n * consumption.\n */\nclass DatastoreImpl extends Datastore {\n constructor(\n public readonly connection: Connection,\n public readonly credentials: CredentialsProvider,\n public readonly serializer: JsonProtoSerializer\n ) {\n super();\n }\n\n /** Gets an auth token and invokes the provided RPC. */\n invokeRPC<Req, Resp>(rpcName: string, request: Req): Promise<Resp> {\n return this.credentials\n .getToken()\n .then(token => {\n return this.connection.invokeRPC<Req, Resp>(rpcName, request, token);\n })\n .catch((error: FirestoreError) => {\n if (error.code === Code.UNAUTHENTICATED) {\n this.credentials.invalidateToken();\n }\n throw error;\n });\n }\n\n /** Gets an auth token and invokes the provided RPC with streamed results. 
*/\n invokeStreamingRPC<Req, Resp>(\n rpcName: string,\n request: Req\n ): Promise<Resp[]> {\n return this.credentials\n .getToken()\n .then(token => {\n return this.connection.invokeStreamingRPC<Req, Resp>(\n rpcName,\n request,\n token\n );\n })\n .catch((error: FirestoreError) => {\n if (error.code === Code.UNAUTHENTICATED) {\n this.credentials.invalidateToken();\n }\n throw error;\n });\n }\n}\n\nexport function newDatastore(\n connection: Connection,\n credentials: CredentialsProvider,\n serializer: JsonProtoSerializer\n): Datastore {\n return new DatastoreImpl(connection, credentials, serializer);\n}\n\nexport async function invokeCommitRpc(\n datastore: Datastore,\n mutations: Mutation[]\n): Promise<MutationResult[]> {\n const datastoreImpl = debugCast(datastore, DatastoreImpl);\n const params = {\n database: datastoreImpl.serializer.encodedDatabaseId,\n writes: mutations.map(m => datastoreImpl.serializer.toMutation(m))\n };\n const response = await datastoreImpl.invokeRPC<\n api.CommitRequest,\n api.CommitResponse\n >('Commit', params);\n return datastoreImpl.serializer.fromWriteResults(\n response.writeResults,\n response.commitTime\n );\n}\n\nexport async function invokeBatchGetDocumentsRpc(\n datastore: Datastore,\n keys: DocumentKey[]\n): Promise<MaybeDocument[]> {\n const datastoreImpl = debugCast(datastore, DatastoreImpl);\n const params = {\n database: datastoreImpl.serializer.encodedDatabaseId,\n documents: keys.map(k => datastoreImpl.serializer.toName(k))\n };\n const response = await datastoreImpl.invokeStreamingRPC<\n api.BatchGetDocumentsRequest,\n api.BatchGetDocumentsResponse\n >('BatchGetDocuments', params);\n\n const docs = new Map<string, MaybeDocument>();\n response.forEach(proto => {\n const doc = datastoreImpl.serializer.fromMaybeDocument(proto);\n docs.set(doc.key.toString(), doc);\n });\n const result: MaybeDocument[] = [];\n keys.forEach(key => {\n const doc = docs.get(key.toString());\n hardAssert(!!doc, 'Missing entity in write response for ' + key);\n result.push(doc);\n });\n return result;\n}\n\nexport async function invokeRunQueryRpc(\n datastore: Datastore,\n query: Query\n): Promise<Document[]> {\n const datastoreImpl = debugCast(datastore, DatastoreImpl);\n const { structuredQuery, parent } = datastoreImpl.serializer.toQueryTarget(\n query.toTarget()\n );\n const params = {\n database: datastoreImpl.serializer.encodedDatabaseId,\n parent,\n structuredQuery\n };\n\n const response = await datastoreImpl.invokeStreamingRPC<\n api.RunQueryRequest,\n api.RunQueryResponse\n >('RunQuery', params);\n\n return (\n response\n // Omit RunQueryResponses that only contain readTimes.\n .filter(proto => !!proto.document)\n .map(proto => datastoreImpl.serializer.fromDocument(proto.document!))\n );\n}\n\nexport function newPersistentWriteStream(\n datastore: Datastore,\n queue: AsyncQueue,\n listener: WriteStreamListener\n): PersistentWriteStream {\n const datastoreImpl = debugCast(datastore, DatastoreImpl);\n return new PersistentWriteStream(\n queue,\n datastoreImpl.connection,\n datastoreImpl.credentials,\n datastoreImpl.serializer,\n listener\n );\n}\n\nexport function newPersistentWatchStream(\n datastore: Datastore,\n queue: AsyncQueue,\n listener: WatchStreamListener\n): PersistentListenStream {\n const datastoreImpl = debugCast(datastore, DatastoreImpl);\n return new PersistentListenStream(\n queue,\n datastoreImpl.connection,\n datastoreImpl.credentials,\n datastoreImpl.serializer,\n listener\n );\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * 
Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ParsedSetData, ParsedUpdateData } from '../api/user_data_reader';\nimport { documentVersionMap } from '../model/collections';\nimport { Document, MaybeDocument, NoDocument } from '../model/document';\n\nimport { DocumentKey } from '../model/document_key';\nimport {\n DeleteMutation,\n Mutation,\n Precondition,\n VerifyMutation\n} from '../model/mutation';\nimport {\n Datastore,\n invokeBatchGetDocumentsRpc,\n invokeCommitRpc\n} from '../remote/datastore';\nimport { fail, debugAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { SnapshotVersion } from './snapshot_version';\n\n/**\n * Internal transaction object responsible for accumulating the mutations to\n * perform and the base versions for any documents read.\n */\nexport class Transaction {\n // The version of each document that was read during this transaction.\n private readVersions = documentVersionMap();\n private mutations: Mutation[] = [];\n private committed = false;\n\n /**\n * A deferred usage error that occurred previously in this transaction that\n * will cause the transaction to fail once it actually commits.\n */\n private lastWriteError: FirestoreError | null = null;\n\n /**\n * Set of documents that have been written in the transaction.\n *\n * When there's more than one write to the same key in a transaction, any\n * writes after the first are handled differently.\n */\n private writtenDocs: Set<DocumentKey> = new Set();\n\n constructor(private datastore: Datastore) {}\n\n async lookup(keys: DocumentKey[]): Promise<MaybeDocument[]> {\n this.ensureCommitNotCalled();\n\n if (this.mutations.length > 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Firestore transactions require all reads to be executed before all writes.'\n );\n }\n const docs = await invokeBatchGetDocumentsRpc(this.datastore, keys);\n docs.forEach(doc => {\n if (doc instanceof NoDocument || doc instanceof Document) {\n this.recordVersion(doc);\n } else {\n fail('Document in a transaction was a ' + doc.constructor.name);\n }\n });\n return docs;\n }\n\n set(key: DocumentKey, data: ParsedSetData): void {\n this.write(data.toMutations(key, this.precondition(key)));\n this.writtenDocs.add(key);\n }\n\n update(key: DocumentKey, data: ParsedUpdateData): void {\n try {\n this.write(data.toMutations(key, this.preconditionForUpdate(key)));\n } catch (e) {\n this.lastWriteError = e;\n }\n this.writtenDocs.add(key);\n }\n\n delete(key: DocumentKey): void {\n this.write([new DeleteMutation(key, this.precondition(key))]);\n this.writtenDocs.add(key);\n }\n\n async commit(): Promise<void> {\n this.ensureCommitNotCalled();\n\n if (this.lastWriteError) {\n throw this.lastWriteError;\n }\n let unwritten = this.readVersions;\n // For each mutation, note that the doc was written.\n this.mutations.forEach(mutation => {\n unwritten = unwritten.remove(mutation.key);\n });\n // For each document that was read but not written to, we want to perform\n // a 
`verify` operation.\n unwritten.forEach((key, _version) => {\n this.mutations.push(new VerifyMutation(key, this.precondition(key)));\n });\n await invokeCommitRpc(this.datastore, this.mutations);\n this.committed = true;\n }\n\n private recordVersion(doc: MaybeDocument): void {\n let docVersion: SnapshotVersion;\n\n if (doc instanceof Document) {\n docVersion = doc.version;\n } else if (doc instanceof NoDocument) {\n // For deleted docs, we must use baseVersion 0 when we overwrite them.\n docVersion = SnapshotVersion.min();\n } else {\n throw fail('Document in a transaction was a ' + doc.constructor.name);\n }\n\n const existingVersion = this.readVersions.get(doc.key);\n if (existingVersion !== null) {\n if (!docVersion.isEqual(existingVersion)) {\n // This transaction will fail no matter what.\n throw new FirestoreError(\n Code.ABORTED,\n 'Document version changed between two reads.'\n );\n }\n } else {\n this.readVersions = this.readVersions.insert(doc.key, docVersion);\n }\n }\n\n /**\n * Returns the version of this document when it was read in this transaction,\n * as a precondition, or no precondition if it was not read.\n */\n private precondition(key: DocumentKey): Precondition {\n const version = this.readVersions.get(key);\n if (!this.writtenDocs.has(key) && version) {\n return Precondition.updateTime(version);\n } else {\n return Precondition.none();\n }\n }\n\n /**\n * Returns the precondition for a document if the operation is an update.\n */\n private preconditionForUpdate(key: DocumentKey): Precondition {\n const version = this.readVersions.get(key);\n // The first time a document is written, we want to take into account the\n // read time and existence\n if (!this.writtenDocs.has(key) && version) {\n if (version.isEqual(SnapshotVersion.min())) {\n // The document doesn't exist, so fail the transaction.\n\n // This has to be validated locally because you can't send a\n // precondition that a document does not exist without changing the\n // semantics of the backend write to be an insert. This is the reverse\n // of what we want, since we want to assert that the document doesn't\n // exist but then send the update and have it fail. 
Since we can't\n // express that to the backend, we have to validate locally.\n\n // Note: this can change once we can send separate verify writes in the\n // transaction.\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n \"Can't update a document that doesn't exist.\"\n );\n }\n // Document exists, base precondition on document update time.\n return Precondition.updateTime(version);\n } else {\n // Document was not read, so we just use the preconditions for a blind\n // update.\n return Precondition.exists(true);\n }\n }\n\n private write(mutations: Mutation[]): void {\n this.ensureCommitNotCalled();\n this.mutations = this.mutations.concat(mutations);\n }\n\n private ensureCommitNotCalled(): void {\n debugAssert(\n !this.committed,\n 'A transaction object cannot be used after its update callback has been invoked.'\n );\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { OnlineState } from '../core/types';\nimport { debugAssert } from '../util/assert';\nimport { AsyncQueue, DelayedOperation, TimerId } from '../util/async_queue';\nimport { FirestoreError } from '../util/error';\nimport { logError, logDebug } from '../util/log';\n\nconst LOG_TAG = 'OnlineStateTracker';\n\n// To deal with transient failures, we allow multiple stream attempts before\n// giving up and transitioning from OnlineState.Unknown to Offline.\n// TODO(mikelehen): This used to be set to 2 as a mitigation for b/66228394.\n// @jdimond thinks that bug is sufficiently fixed so that we can set this back\n// to 1. If that works okay, we could potentially remove this logic entirely.\nconst MAX_WATCH_STREAM_FAILURES = 1;\n\n// To deal with stream attempts that don't succeed or fail in a timely manner,\n// we have a timeout for OnlineState to reach Online or Offline.\n// If the timeout is reached, we transition to Offline rather than waiting\n// indefinitely.\nconst ONLINE_STATE_TIMEOUT_MS = 10 * 1000;\n\n/**\n * A component used by the RemoteStore to track the OnlineState (that is,\n * whether or not the client as a whole should be considered to be online or\n * offline), implementing the appropriate heuristics.\n *\n * In particular, when the client is trying to connect to the backend, we\n * allow up to MAX_WATCH_STREAM_FAILURES within ONLINE_STATE_TIMEOUT_MS for\n * a connection to succeed. If we have too many failures or the timeout elapses,\n * then we set the OnlineState to Offline, and the client will behave as if\n * it is offline (get()s will return cached data, etc.).\n */\nexport class OnlineStateTracker {\n /** The current OnlineState. */\n private state = OnlineState.Unknown;\n\n /**\n * A count of consecutive failures to open the stream. 
If it reaches the\n * maximum defined by MAX_WATCH_STREAM_FAILURES, we'll set the OnlineState to\n * Offline.\n */\n private watchStreamFailures = 0;\n\n /**\n * A timer that elapses after ONLINE_STATE_TIMEOUT_MS, at which point we\n * transition from OnlineState.Unknown to OnlineState.Offline without waiting\n * for the stream to actually fail (MAX_WATCH_STREAM_FAILURES times).\n */\n private onlineStateTimer: DelayedOperation<void> | null = null;\n\n /**\n * Whether the client should log a warning message if it fails to connect to\n * the backend (initially true, cleared after a successful stream, or if we've\n * logged the message already).\n */\n private shouldWarnClientIsOffline = true;\n\n constructor(\n private asyncQueue: AsyncQueue,\n private onlineStateHandler: (onlineState: OnlineState) => void\n ) {}\n\n /**\n * Called by RemoteStore when a watch stream is started (including on each\n * backoff attempt).\n *\n * If this is the first attempt, it sets the OnlineState to Unknown and starts\n * the onlineStateTimer.\n */\n handleWatchStreamStart(): void {\n if (this.watchStreamFailures === 0) {\n this.setAndBroadcast(OnlineState.Unknown);\n\n debugAssert(\n this.onlineStateTimer === null,\n `onlineStateTimer shouldn't be started yet`\n );\n this.onlineStateTimer = this.asyncQueue.enqueueAfterDelay(\n TimerId.OnlineStateTimeout,\n ONLINE_STATE_TIMEOUT_MS,\n () => {\n this.onlineStateTimer = null;\n debugAssert(\n this.state === OnlineState.Unknown,\n 'Timer should be canceled if we transitioned to a different state.'\n );\n this.logClientOfflineWarningIfNecessary(\n `Backend didn't respond within ${ONLINE_STATE_TIMEOUT_MS / 1000} ` +\n `seconds.`\n );\n this.setAndBroadcast(OnlineState.Offline);\n\n // NOTE: handleWatchStreamFailure() will continue to increment\n // watchStreamFailures even though we are already marked Offline,\n // but this is non-harmful.\n\n return Promise.resolve();\n }\n );\n }\n }\n\n /**\n * Updates our OnlineState as appropriate after the watch stream reports a\n * failure. The first failure moves us to the 'Unknown' state. We then may\n * allow multiple failures (based on MAX_WATCH_STREAM_FAILURES) before we\n * actually transition to the 'Offline' state.\n */\n handleWatchStreamFailure(error: FirestoreError): void {\n if (this.state === OnlineState.Online) {\n this.setAndBroadcast(OnlineState.Unknown);\n\n // To get to OnlineState.Online, set() must have been called which would\n // have reset our heuristics.\n debugAssert(\n this.watchStreamFailures === 0,\n 'watchStreamFailures must be 0'\n );\n debugAssert(\n this.onlineStateTimer === null,\n 'onlineStateTimer must be null'\n );\n } else {\n this.watchStreamFailures++;\n if (this.watchStreamFailures >= MAX_WATCH_STREAM_FAILURES) {\n this.clearOnlineStateTimer();\n\n this.logClientOfflineWarningIfNecessary(\n `Connection failed ${MAX_WATCH_STREAM_FAILURES} ` +\n `times. Most recent error: ${error.toString()}`\n );\n\n this.setAndBroadcast(OnlineState.Offline);\n }\n }\n }\n\n /**\n * Explicitly sets the OnlineState to the specified state.\n *\n * Note that this resets our timers / failure counters, etc. used by our\n * Offline heuristics, so must not be used in place of\n * handleWatchStreamStart() and handleWatchStreamFailure().\n */\n set(newState: OnlineState): void {\n this.clearOnlineStateTimer();\n this.watchStreamFailures = 0;\n\n if (newState === OnlineState.Online) {\n // We've connected to watch at least once. 
Don't warn the developer\n // about being offline going forward.\n this.shouldWarnClientIsOffline = false;\n }\n\n this.setAndBroadcast(newState);\n }\n\n private setAndBroadcast(newState: OnlineState): void {\n if (newState !== this.state) {\n this.state = newState;\n this.onlineStateHandler(newState);\n }\n }\n\n private logClientOfflineWarningIfNecessary(details: string): void {\n const message =\n `Could not reach Cloud Firestore backend. ${details}\\n` +\n `This typically indicates that your device does not have a healthy ` +\n `Internet connection at the moment. The client will operate in offline ` +\n `mode until it is able to successfully connect to the backend.`;\n if (this.shouldWarnClientIsOffline) {\n logError(message);\n this.shouldWarnClientIsOffline = false;\n } else {\n logDebug(LOG_TAG, message);\n }\n }\n\n private clearOnlineStateTimer(): void {\n if (this.onlineStateTimer !== null) {\n this.onlineStateTimer.cancel();\n this.onlineStateTimer = null;\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { Transaction } from '../core/transaction';\nimport { OnlineState, TargetId } from '../core/types';\nimport { ignoreIfPrimaryLeaseLoss, LocalStore } from '../local/local_store';\nimport { TargetData, TargetPurpose } from '../local/target_data';\nimport { MutationResult } from '../model/mutation';\nimport {\n BATCHID_UNKNOWN,\n MutationBatch,\n MutationBatchResult\n} from '../model/mutation_batch';\nimport { debugAssert } from '../util/assert';\nimport { FirestoreError } from '../util/error';\nimport { logDebug } from '../util/log';\nimport { DocumentKeySet } from '../model/collections';\nimport { AsyncQueue } from '../util/async_queue';\nimport { ConnectivityMonitor, NetworkStatus } from './connectivity_monitor';\nimport {\n Datastore,\n newPersistentWatchStream,\n newPersistentWriteStream\n} from './datastore';\nimport { OnlineStateTracker } from './online_state_tracker';\nimport {\n PersistentListenStream,\n PersistentWriteStream\n} from './persistent_stream';\nimport { RemoteSyncer } from './remote_syncer';\nimport { isPermanentError, isPermanentWriteError } from './rpc_error';\nimport {\n DocumentWatchChange,\n ExistenceFilterChange,\n TargetMetadataProvider,\n WatchChange,\n WatchChangeAggregator,\n WatchTargetChange,\n WatchTargetChangeState\n} from './watch_change';\nimport { ByteString } from '../util/byte_string';\nimport { isIndexedDbTransactionError } from '../local/simple_db';\n\nconst LOG_TAG = 'RemoteStore';\n\n// TODO(b/35853402): Negotiate this with the stream.\nconst MAX_PENDING_WRITES = 10;\n\n/**\n * RemoteStore - An interface to remotely stored data, basically providing a\n * wrapper around the Datastore that is more reliable for the rest of the\n * system.\n *\n * RemoteStore is responsible for maintaining the connection to the server.\n * - maintaining a list of active listens.\n * - reconnecting when the 
connection is dropped.\n * - resuming all the active listens on reconnect.\n *\n * RemoteStore handles all incoming events from the Datastore.\n * - listening to the watch stream and repackaging the events as RemoteEvents\n * - notifying SyncEngine of any changes to the active listens.\n *\n * RemoteStore takes writes from other components and handles them reliably.\n * - pulling pending mutations from LocalStore and sending them to Datastore.\n * - retrying mutations that failed because of network problems.\n * - acking mutations to the SyncEngine once they are accepted or rejected.\n */\nexport class RemoteStore implements TargetMetadataProvider {\n /**\n * A list of up to MAX_PENDING_WRITES writes that we have fetched from the\n * LocalStore via fillWritePipeline() and have or will send to the write\n * stream.\n *\n * Whenever writePipeline.length > 0 the RemoteStore will attempt to start or\n * restart the write stream. When the stream is established the writes in the\n * pipeline will be sent in order.\n *\n * Writes remain in writePipeline until they are acknowledged by the backend\n * and thus will automatically be re-sent if the stream is interrupted /\n * restarted before they're acknowledged.\n *\n * Write responses from the backend are linked to their originating request\n * purely based on order, and so we can just shift() writes from the front of\n * the writePipeline as we receive responses.\n */\n private writePipeline: MutationBatch[] = [];\n\n /**\n * A mapping of watched targets that the client cares about tracking and the\n * user has explicitly called a 'listen' for this target.\n *\n * These targets may or may not have been sent to or acknowledged by the\n * server. On re-establishing the listen stream, these targets should be sent\n * to the server. The targets removed with unlistens are removed eagerly\n * without waiting for confirmation from the listen stream.\n */\n private listenTargets = new Map<TargetId, TargetData>();\n\n private connectivityMonitor: ConnectivityMonitor;\n private watchStream: PersistentListenStream;\n private writeStream: PersistentWriteStream;\n private watchChangeAggregator: WatchChangeAggregator | null = null;\n\n /**\n * Set to true by enableNetwork() and false by disableNetwork() and indicates\n * the user-preferred network state.\n */\n private networkEnabled = false;\n\n private isPrimary = false;\n\n /**\n * When set to `true`, the network was taken offline due to an IndexedDB\n * failure. The state is flipped to `false` when access becomes available\n * again.\n */\n private indexedDbFailed = false;\n\n private onlineStateTracker: OnlineStateTracker;\n\n constructor(\n /**\n * The local store, used to fill the write pipeline with outbound mutations.\n */\n private localStore: LocalStore,\n /** The client-side proxy for interacting with the backend. 
*/\n private datastore: Datastore,\n private asyncQueue: AsyncQueue,\n onlineStateHandler: (onlineState: OnlineState) => void,\n connectivityMonitor: ConnectivityMonitor\n ) {\n this.connectivityMonitor = connectivityMonitor;\n this.connectivityMonitor.addCallback((status: NetworkStatus) => {\n asyncQueue.enqueueAndForget(async () => {\n if (this.canUseNetwork()) {\n logDebug(\n LOG_TAG,\n 'Restarting streams for network reachability change.'\n );\n await this.restartNetwork();\n }\n });\n });\n\n this.onlineStateTracker = new OnlineStateTracker(\n asyncQueue,\n onlineStateHandler\n );\n\n // Create streams (but note they're not started yet).\n this.watchStream = newPersistentWatchStream(this.datastore, asyncQueue, {\n onOpen: this.onWatchStreamOpen.bind(this),\n onClose: this.onWatchStreamClose.bind(this),\n onWatchChange: this.onWatchStreamChange.bind(this)\n });\n\n this.writeStream = newPersistentWriteStream(this.datastore, asyncQueue, {\n onOpen: this.onWriteStreamOpen.bind(this),\n onClose: this.onWriteStreamClose.bind(this),\n onHandshakeComplete: this.onWriteHandshakeComplete.bind(this),\n onMutationResult: this.onMutationResult.bind(this)\n });\n }\n\n /**\n * SyncEngine to notify of watch and write events. This must be set\n * immediately after construction.\n */\n syncEngine!: RemoteSyncer;\n\n /**\n * Starts up the remote store, creating streams, restoring state from\n * LocalStore, etc.\n */\n start(): Promise<void> {\n return this.enableNetwork();\n }\n\n /** Re-enables the network. Idempotent. */\n enableNetwork(): Promise<void> {\n this.networkEnabled = true;\n return this.enableNetworkInternal();\n }\n\n private async enableNetworkInternal(): Promise<void> {\n if (this.canUseNetwork()) {\n this.writeStream.lastStreamToken = await this.localStore.getLastStreamToken();\n\n if (this.shouldStartWatchStream()) {\n this.startWatchStream();\n } else {\n this.onlineStateTracker.set(OnlineState.Unknown);\n }\n\n // This will start the write stream if necessary.\n await this.fillWritePipeline();\n }\n }\n\n /**\n * Temporarily disables the network. The network can be re-enabled using\n * enableNetwork().\n */\n async disableNetwork(): Promise<void> {\n this.networkEnabled = false;\n await this.disableNetworkInternal();\n\n // Set the OnlineState to Offline so get()s return from cache, etc.\n this.onlineStateTracker.set(OnlineState.Offline);\n }\n\n private async disableNetworkInternal(): Promise<void> {\n await this.writeStream.stop();\n await this.watchStream.stop();\n\n if (this.writePipeline.length > 0) {\n logDebug(\n LOG_TAG,\n `Stopping write stream with ${this.writePipeline.length} pending writes`\n );\n this.writePipeline = [];\n }\n\n this.cleanUpWatchStreamState();\n }\n\n async shutdown(): Promise<void> {\n logDebug(LOG_TAG, 'RemoteStore shutting down.');\n this.networkEnabled = false;\n await this.disableNetworkInternal();\n this.connectivityMonitor.shutdown();\n\n // Set the OnlineState to Unknown (rather than Offline) to avoid potentially\n // triggering spurious listener events with cached data, etc.\n this.onlineStateTracker.set(OnlineState.Unknown);\n }\n\n /**\n * Starts new listen for the given target. Uses resume token if provided. 
It\n * is a no-op if the target of given `TargetData` is already being listened to.\n */\n listen(targetData: TargetData): void {\n if (this.listenTargets.has(targetData.targetId)) {\n return;\n }\n\n // Mark this as something the client is currently listening for.\n this.listenTargets.set(targetData.targetId, targetData);\n\n if (this.shouldStartWatchStream()) {\n // The listen will be sent in onWatchStreamOpen\n this.startWatchStream();\n } else if (this.watchStream.isOpen()) {\n this.sendWatchRequest(targetData);\n }\n }\n\n /**\n * Removes the listen from server. It is a no-op if the given target id is\n * not being listened to.\n */\n unlisten(targetId: TargetId): void {\n debugAssert(\n this.listenTargets.has(targetId),\n `unlisten called on target no currently watched: ${targetId}`\n );\n\n this.listenTargets.delete(targetId);\n if (this.watchStream.isOpen()) {\n this.sendUnwatchRequest(targetId);\n }\n\n if (this.listenTargets.size === 0) {\n if (this.watchStream.isOpen()) {\n this.watchStream.markIdle();\n } else if (this.canUseNetwork()) {\n // Revert to OnlineState.Unknown if the watch stream is not open and we\n // have no listeners, since without any listens to send we cannot\n // confirm if the stream is healthy and upgrade to OnlineState.Online.\n this.onlineStateTracker.set(OnlineState.Unknown);\n }\n }\n }\n\n /** {@link TargetMetadataProvider.getTargetDataForTarget} */\n getTargetDataForTarget(targetId: TargetId): TargetData | null {\n return this.listenTargets.get(targetId) || null;\n }\n\n /** {@link TargetMetadataProvider.getRemoteKeysForTarget} */\n getRemoteKeysForTarget(targetId: TargetId): DocumentKeySet {\n return this.syncEngine.getRemoteKeysForTarget(targetId);\n }\n\n /**\n * We need to increment the the expected number of pending responses we're due\n * from watch so we wait for the ack to process any messages from this target.\n */\n private sendWatchRequest(targetData: TargetData): void {\n this.watchChangeAggregator!.recordPendingTargetRequest(targetData.targetId);\n this.watchStream.watch(targetData);\n }\n\n /**\n * We need to increment the expected number of pending responses we're due\n * from watch so we wait for the removal on the server before we process any\n * messages from this target.\n */\n private sendUnwatchRequest(targetId: TargetId): void {\n this.watchChangeAggregator!.recordPendingTargetRequest(targetId);\n this.watchStream.unwatch(targetId);\n }\n\n private startWatchStream(): void {\n debugAssert(\n this.shouldStartWatchStream(),\n 'startWatchStream() called when shouldStartWatchStream() is false.'\n );\n\n this.watchChangeAggregator = new WatchChangeAggregator(this);\n this.watchStream.start();\n this.onlineStateTracker.handleWatchStreamStart();\n }\n\n /**\n * Returns whether the watch stream should be started because it's necessary\n * and has not yet been started.\n */\n private shouldStartWatchStream(): boolean {\n return (\n this.canUseNetwork() &&\n !this.watchStream.isStarted() &&\n this.listenTargets.size > 0\n );\n }\n\n canUseNetwork(): boolean {\n return !this.indexedDbFailed && this.isPrimary && this.networkEnabled;\n }\n\n private cleanUpWatchStreamState(): void {\n this.watchChangeAggregator = null;\n }\n\n private async onWatchStreamOpen(): Promise<void> {\n this.listenTargets.forEach((targetData, targetId) => {\n this.sendWatchRequest(targetData);\n });\n }\n\n private async onWatchStreamClose(error?: FirestoreError): Promise<void> {\n if (error === undefined) {\n // Graceful stop (due to stop() or idle timeout). 
Make sure that's\n // desirable.\n debugAssert(\n !this.shouldStartWatchStream(),\n 'Watch stream was stopped gracefully while still needed.'\n );\n }\n\n this.cleanUpWatchStreamState();\n\n // If we still need the watch stream, retry the connection.\n if (this.shouldStartWatchStream()) {\n this.onlineStateTracker.handleWatchStreamFailure(error!);\n\n this.startWatchStream();\n } else {\n // No need to restart watch stream because there are no active targets.\n // The online state is set to unknown because there is no active attempt\n // at establishing a connection\n this.onlineStateTracker.set(OnlineState.Unknown);\n }\n }\n\n private async onWatchStreamChange(\n watchChange: WatchChange,\n snapshotVersion: SnapshotVersion\n ): Promise<void> {\n // Mark the client as online since we got a message from the server\n this.onlineStateTracker.set(OnlineState.Online);\n\n if (\n watchChange instanceof WatchTargetChange &&\n watchChange.state === WatchTargetChangeState.Removed &&\n watchChange.cause\n ) {\n // There was an error on a target, don't wait for a consistent snapshot\n // to raise events\n try {\n await this.handleTargetError(watchChange);\n } catch (e) {\n logDebug(\n LOG_TAG,\n 'Failed to remove targets %s: %s ',\n watchChange.targetIds.join(','),\n e\n );\n await this.disableNetworkUntilRecovery(e);\n }\n return;\n }\n\n if (watchChange instanceof DocumentWatchChange) {\n this.watchChangeAggregator!.handleDocumentChange(watchChange);\n } else if (watchChange instanceof ExistenceFilterChange) {\n this.watchChangeAggregator!.handleExistenceFilter(watchChange);\n } else {\n debugAssert(\n watchChange instanceof WatchTargetChange,\n 'Expected watchChange to be an instance of WatchTargetChange'\n );\n this.watchChangeAggregator!.handleTargetChange(watchChange);\n }\n\n if (!snapshotVersion.isEqual(SnapshotVersion.min())) {\n try {\n const lastRemoteSnapshotVersion = await this.localStore.getLastRemoteSnapshotVersion();\n if (snapshotVersion.compareTo(lastRemoteSnapshotVersion) >= 0) {\n // We have received a target change with a global snapshot if the snapshot\n // version is not equal to SnapshotVersion.min().\n await this.raiseWatchSnapshot(snapshotVersion);\n }\n } catch (e) {\n logDebug(LOG_TAG, 'Failed to raise snapshot:', e);\n await this.disableNetworkUntilRecovery(e);\n }\n }\n }\n\n /**\n * Recovery logic for IndexedDB errors that takes the network offline until\n * IndexedDb probing succeeds. 
Retries are scheduled with backoff using\n * `enqueueRetryable()`.\n */\n private async disableNetworkUntilRecovery(e: FirestoreError): Promise<void> {\n if (isIndexedDbTransactionError(e)) {\n debugAssert(\n !this.indexedDbFailed,\n 'Unexpected network event when IndexedDB was marked failed.'\n );\n this.indexedDbFailed = true;\n\n // Disable network and raise offline snapshots\n await this.disableNetworkInternal();\n this.onlineStateTracker.set(OnlineState.Offline);\n\n // Probe IndexedDB periodically and re-enable network\n this.asyncQueue.enqueueRetryable(async () => {\n logDebug(LOG_TAG, 'Retrying IndexedDB access');\n // Issue a simple read operation to determine if IndexedDB recovered.\n // Ideally, we would expose a health check directly on SimpleDb, but\n // RemoteStore only has access to persistence through LocalStore.\n await this.localStore.getLastRemoteSnapshotVersion();\n this.indexedDbFailed = false;\n await this.enableNetworkInternal();\n });\n } else {\n throw e;\n }\n }\n\n /**\n * Takes a batch of changes from the Datastore, repackages them as a\n * RemoteEvent, and passes that on to the listener, which is typically the\n * SyncEngine.\n */\n private raiseWatchSnapshot(snapshotVersion: SnapshotVersion): Promise<void> {\n debugAssert(\n !snapshotVersion.isEqual(SnapshotVersion.min()),\n \"Can't raise event for unknown SnapshotVersion\"\n );\n const remoteEvent = this.watchChangeAggregator!.createRemoteEvent(\n snapshotVersion\n );\n\n // Update in-memory resume tokens. LocalStore will update the\n // persistent view of these when applying the completed RemoteEvent.\n remoteEvent.targetChanges.forEach((change, targetId) => {\n if (change.resumeToken.approximateByteSize() > 0) {\n const targetData = this.listenTargets.get(targetId);\n // A watched target might have been removed already.\n if (targetData) {\n this.listenTargets.set(\n targetId,\n targetData.withResumeToken(change.resumeToken, snapshotVersion)\n );\n }\n }\n });\n\n // Re-establish listens for the targets that have been invalidated by\n // existence filter mismatches.\n remoteEvent.targetMismatches.forEach(targetId => {\n const targetData = this.listenTargets.get(targetId);\n if (!targetData) {\n // A watched target might have been removed already.\n return;\n }\n\n // Clear the resume token for the target, since we're in a known mismatch\n // state.\n this.listenTargets.set(\n targetId,\n targetData.withResumeToken(\n ByteString.EMPTY_BYTE_STRING,\n targetData.snapshotVersion\n )\n );\n\n // Cause a hard reset by unwatching and rewatching immediately, but\n // deliberately don't send a resume token so that we get a full update.\n this.sendUnwatchRequest(targetId);\n\n // Mark the target we send as being on behalf of an existence filter\n // mismatch, but don't actually retain that in listenTargets. This ensures\n // that we flag the first re-listen this way without impacting future\n // listens of this target (that might happen e.g. 
on reconnect).\n const requestTargetData = new TargetData(\n targetData.target,\n targetId,\n TargetPurpose.ExistenceFilterMismatch,\n targetData.sequenceNumber\n );\n this.sendWatchRequest(requestTargetData);\n });\n\n // Finally raise remote event\n return this.syncEngine.applyRemoteEvent(remoteEvent);\n }\n\n /** Handles an error on a target */\n private async handleTargetError(\n watchChange: WatchTargetChange\n ): Promise<void> {\n debugAssert(!!watchChange.cause, 'Handling target error without a cause');\n const error = watchChange.cause!;\n for (const targetId of watchChange.targetIds) {\n // A watched target might have been removed already.\n if (this.listenTargets.has(targetId)) {\n await this.syncEngine.rejectListen(targetId, error);\n this.listenTargets.delete(targetId);\n this.watchChangeAggregator!.removeTarget(targetId);\n }\n }\n }\n\n /**\n * Attempts to fill our write pipeline with writes from the LocalStore.\n *\n * Called internally to bootstrap or refill the write pipeline and by\n * SyncEngine whenever there are new mutations to process.\n *\n * Starts the write stream if necessary.\n */\n async fillWritePipeline(): Promise<void> {\n if (this.canAddToWritePipeline()) {\n const lastBatchIdRetrieved =\n this.writePipeline.length > 0\n ? this.writePipeline[this.writePipeline.length - 1].batchId\n : BATCHID_UNKNOWN;\n const batch = await this.localStore.nextMutationBatch(\n lastBatchIdRetrieved\n );\n\n if (batch === null) {\n if (this.writePipeline.length === 0) {\n this.writeStream.markIdle();\n }\n } else {\n this.addToWritePipeline(batch);\n await this.fillWritePipeline();\n }\n }\n\n if (this.shouldStartWriteStream()) {\n this.startWriteStream();\n }\n }\n\n /**\n * Returns true if we can add to the write pipeline (i.e. the network is\n * enabled and the write pipeline is not full).\n */\n private canAddToWritePipeline(): boolean {\n return (\n this.canUseNetwork() && this.writePipeline.length < MAX_PENDING_WRITES\n );\n }\n\n // For testing\n outstandingWrites(): number {\n return this.writePipeline.length;\n }\n\n /**\n * Queues additional writes to be sent to the write stream, sending them\n * immediately if the write stream is established.\n */\n private addToWritePipeline(batch: MutationBatch): void {\n debugAssert(\n this.canAddToWritePipeline(),\n 'addToWritePipeline called when pipeline is full'\n );\n this.writePipeline.push(batch);\n\n if (this.writeStream.isOpen() && this.writeStream.handshakeComplete) {\n this.writeStream.writeMutations(batch.mutations);\n }\n }\n\n private shouldStartWriteStream(): boolean {\n return (\n this.canUseNetwork() &&\n !this.writeStream.isStarted() &&\n this.writePipeline.length > 0\n );\n }\n\n private startWriteStream(): void {\n debugAssert(\n this.shouldStartWriteStream(),\n 'startWriteStream() called when shouldStartWriteStream() is false.'\n );\n this.writeStream.start();\n }\n\n private async onWriteStreamOpen(): Promise<void> {\n this.writeStream.writeHandshake();\n }\n\n private onWriteHandshakeComplete(): Promise<void> {\n // Record the stream token.\n return this.localStore\n .setLastStreamToken(this.writeStream.lastStreamToken)\n .then(() => {\n // Send the write pipeline now that the stream is established.\n for (const batch of this.writePipeline) {\n this.writeStream.writeMutations(batch.mutations);\n }\n })\n .catch(ignoreIfPrimaryLeaseLoss);\n }\n\n private onMutationResult(\n commitVersion: SnapshotVersion,\n results: MutationResult[]\n ): Promise<void> {\n // This is a response to a write containing mutations 
and should be\n // correlated to the first write in our write pipeline.\n debugAssert(\n this.writePipeline.length > 0,\n 'Got result for empty write pipeline'\n );\n const batch = this.writePipeline.shift()!;\n const success = MutationBatchResult.from(\n batch,\n commitVersion,\n results,\n this.writeStream.lastStreamToken\n );\n return this.syncEngine.applySuccessfulWrite(success).then(() => {\n // It's possible that with the completion of this mutation another\n // slot has freed up.\n return this.fillWritePipeline();\n });\n }\n\n private async onWriteStreamClose(error?: FirestoreError): Promise<void> {\n if (error === undefined) {\n // Graceful stop (due to stop() or idle timeout). Make sure that's\n // desirable.\n debugAssert(\n !this.shouldStartWriteStream(),\n 'Write stream was stopped gracefully while still needed.'\n );\n }\n\n // If the write stream closed due to an error, invoke the error callbacks if\n // there are pending writes.\n if (error && this.writePipeline.length > 0) {\n if (this.writeStream.handshakeComplete) {\n // This error affects the actual write.\n await this.handleWriteError(error!);\n } else {\n // If there was an error before the handshake has finished, it's\n // possible that the server is unable to process the stream token\n // we're sending. (Perhaps it's too old?)\n await this.handleHandshakeError(error!);\n }\n\n // The write stream might have been started by refilling the write\n // pipeline for failed writes\n if (this.shouldStartWriteStream()) {\n this.startWriteStream();\n }\n }\n // No pending writes, nothing to do\n }\n\n private async handleHandshakeError(error: FirestoreError): Promise<void> {\n // Reset the token if it's a permanent error, signaling the write stream is\n // no longer valid. Note that the handshake does not count as a write: see\n // comments on isPermanentWriteError for details.\n if (isPermanentError(error.code)) {\n logDebug(\n LOG_TAG,\n 'RemoteStore error before completed handshake; resetting stream token: ',\n this.writeStream.lastStreamToken\n );\n this.writeStream.lastStreamToken = ByteString.EMPTY_BYTE_STRING;\n\n return this.localStore\n .setLastStreamToken(ByteString.EMPTY_BYTE_STRING)\n .catch(ignoreIfPrimaryLeaseLoss);\n } else {\n // Some other error, don't reset stream token. Our stream logic will\n // just retry with exponential backoff.\n }\n }\n\n private async handleWriteError(error: FirestoreError): Promise<void> {\n // Only handle permanent errors here. 
If it's transient, just let the retry\n // logic kick in.\n if (isPermanentWriteError(error.code)) {\n // This was a permanent error, the request itself was the problem\n // so it's not going to succeed if we resend it.\n const batch = this.writePipeline.shift()!;\n\n // In this case it's also unlikely that the server itself is melting\n // down -- this was just a bad request so inhibit backoff on the next\n // restart.\n this.writeStream.inhibitBackoff();\n\n return this.syncEngine\n .rejectFailedWrite(batch.batchId, error)\n .then(() => {\n // It's possible that with the completion of this mutation\n // another slot has freed up.\n return this.fillWritePipeline();\n });\n } else {\n // Transient error, just let the retry logic kick in.\n }\n }\n\n createTransaction(): Transaction {\n return new Transaction(this.datastore);\n }\n\n private async restartNetwork(): Promise<void> {\n this.networkEnabled = false;\n await this.disableNetworkInternal();\n this.onlineStateTracker.set(OnlineState.Unknown);\n await this.enableNetwork();\n }\n\n async handleCredentialChange(): Promise<void> {\n if (this.canUseNetwork()) {\n // Tear down and re-create our network streams. This will ensure we get a fresh auth token\n // for the new user and re-fill the write pipeline with new mutations from the LocalStore\n // (since mutations are per-user).\n logDebug(LOG_TAG, 'RemoteStore restarting streams for new credential');\n await this.restartNetwork();\n }\n }\n\n /**\n * Toggles the network state when the client gains or loses its primary lease.\n */\n async applyPrimaryState(isPrimary: boolean): Promise<void> {\n this.isPrimary = isPrimary;\n\n if (isPrimary && this.networkEnabled) {\n await this.enableNetwork();\n } else if (!isPrimary) {\n await this.disableNetworkInternal();\n this.onlineStateTracker.set(OnlineState.Unknown);\n }\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { User } from '../auth/user';\nimport { ListenSequence } from '../core/listen_sequence';\nimport {\n BatchId,\n ListenSequenceNumber,\n MutationBatchState,\n OnlineState,\n TargetId\n} from '../core/types';\nimport { TargetIdSet, targetIdSet } from '../model/collections';\nimport { Platform } from '../platform/platform';\nimport { hardAssert, debugAssert } from '../util/assert';\nimport { AsyncQueue } from '../util/async_queue';\nimport { Code, FirestoreError } from '../util/error';\nimport { logError, logDebug } from '../util/log';\nimport { SortedSet } from '../util/sorted_set';\nimport { SortedMap } from '../util/sorted_map';\nimport { primitiveComparator } from '../util/misc';\nimport { isSafeInteger } from '../util/types';\nimport {\n QueryTargetState,\n SharedClientStateSyncer\n} from './shared_client_state_syncer';\nimport {\n CLIENT_STATE_KEY_PREFIX,\n ClientStateSchema,\n createWebStorageClientStateKey,\n createWebStorageMutationBatchKey,\n createWebStorageOnlineStateKey,\n createWebStorageQueryTargetMetadataKey,\n 
createWebStorageSequenceNumberKey,\n MUTATION_BATCH_KEY_PREFIX,\n MutationMetadataSchema,\n QUERY_TARGET_KEY_PREFIX,\n QueryTargetStateSchema,\n SharedOnlineStateSchema\n} from './shared_client_state_schema';\n\nconst LOG_TAG = 'SharedClientState';\n\n/**\n * A randomly-generated key assigned to each Firestore instance at startup.\n */\nexport type ClientId = string;\n\n/**\n * A `SharedClientState` keeps track of the global state of the mutations\n * and query targets for all active clients with the same persistence key (i.e.\n * project ID and FirebaseApp name). It relays local changes to other clients\n * and updates its local state as new state is observed.\n *\n * `SharedClientState` is primarily used for synchronization in Multi-Tab\n * environments. Each tab is responsible for registering its active query\n * targets and mutations. `SharedClientState` will then notify the listener\n * assigned to `.syncEngine` for updates to mutations and queries that\n * originated in other clients.\n *\n * To receive notifications, `.syncEngine` and `.onlineStateHandler` has to be\n * assigned before calling `start()`.\n */\nexport interface SharedClientState {\n syncEngine: SharedClientStateSyncer | null;\n onlineStateHandler: ((onlineState: OnlineState) => void) | null;\n sequenceNumberHandler:\n | ((sequenceNumber: ListenSequenceNumber) => void)\n | null;\n\n /** Registers the Mutation Batch ID of a newly pending mutation. */\n addPendingMutation(batchId: BatchId): void;\n\n /**\n * Records that a pending mutation has been acknowledged or rejected.\n * Called by the primary client to notify secondary clients of mutation\n * results as they come back from the backend.\n */\n updateMutationState(\n batchId: BatchId,\n state: 'acknowledged' | 'rejected',\n error?: FirestoreError\n ): void;\n\n /**\n * Associates a new Query Target ID with the local Firestore client. Returns\n * the new query state for the query (which can be 'current' if the query is\n * already associated with another tab).\n *\n * If the target id is already associated with local client, the method simply\n * returns its `QueryTargetState`.\n */\n addLocalQueryTarget(targetId: TargetId): QueryTargetState;\n\n /** Removes the Query Target ID association from the local client. */\n removeLocalQueryTarget(targetId: TargetId): void;\n\n /** Checks whether the target is associated with the local client. 
*/\n isLocalQueryTarget(targetId: TargetId): boolean;\n\n /**\n * Processes an update to a query target.\n *\n * Called by the primary client to notify secondary clients of document\n * changes or state transitions that affect the provided query target.\n */\n updateQueryState(\n targetId: TargetId,\n state: QueryTargetState,\n error?: FirestoreError\n ): void;\n\n /**\n * Removes the target's metadata entry.\n *\n * Called by the primary client when all clients stopped listening to a query\n * target.\n */\n clearQueryState(targetId: TargetId): void;\n\n /**\n * Gets the active Query Targets IDs for all active clients.\n *\n * The implementation for this may require O(n) runtime, where 'n' is the size\n * of the result set.\n */\n // Visible for testing\n getAllActiveQueryTargets(): SortedSet<TargetId>;\n\n /**\n * Checks whether the provided target ID is currently being listened to by\n * any of the active clients.\n *\n * The implementation may require O(n*log m) runtime, where 'n' is the number\n * of clients and 'm' the number of targets.\n */\n isActiveQueryTarget(targetId: TargetId): boolean;\n\n /**\n * Starts the SharedClientState, reads existing client data and registers\n * listeners for updates to new and existing clients.\n */\n start(): Promise<void>;\n\n /** Shuts down the `SharedClientState` and its listeners. */\n shutdown(): void;\n\n /**\n * Changes the active user and removes all existing user-specific data. The\n * user change does not call back into SyncEngine (for example, no mutations\n * will be marked as removed).\n */\n handleUserChange(\n user: User,\n removedBatchIds: BatchId[],\n addedBatchIds: BatchId[]\n ): void;\n\n /** Changes the shared online state of all clients. */\n setOnlineState(onlineState: OnlineState): void;\n\n writeSequenceNumber(sequenceNumber: ListenSequenceNumber): void;\n}\n\n/**\n * Holds the state of a mutation batch, including its user ID, batch ID and\n * whether the batch is 'pending', 'acknowledged' or 'rejected'.\n */\n// Visible for testing\nexport class MutationMetadata {\n constructor(\n readonly user: User,\n readonly batchId: BatchId,\n readonly state: MutationBatchState,\n readonly error?: FirestoreError\n ) {\n debugAssert(\n (error !== undefined) === (state === 'rejected'),\n `MutationMetadata must contain an error iff state is 'rejected'`\n );\n }\n\n /**\n * Parses a MutationMetadata from its JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static fromWebStorageEntry(\n user: User,\n batchId: BatchId,\n value: string\n ): MutationMetadata | null {\n const mutationBatch = JSON.parse(value) as MutationMetadataSchema;\n\n let validData =\n typeof mutationBatch === 'object' &&\n ['pending', 'acknowledged', 'rejected'].indexOf(mutationBatch.state) !==\n -1 &&\n (mutationBatch.error === undefined ||\n typeof mutationBatch.error === 'object');\n\n let firestoreError: FirestoreError | undefined = undefined;\n\n if (validData && mutationBatch.error) {\n validData =\n typeof mutationBatch.error.message === 'string' &&\n typeof mutationBatch.error.code === 'string';\n if (validData) {\n firestoreError = new FirestoreError(\n mutationBatch.error.code as Code,\n mutationBatch.error.message\n );\n }\n }\n\n if (validData) {\n return new MutationMetadata(\n user,\n batchId,\n mutationBatch.state,\n firestoreError\n );\n } else {\n logError(\n LOG_TAG,\n `Failed to parse mutation state for ID '${batchId}': ${value}`\n );\n return null;\n }\n }\n\n toWebStorageJSON(): 
string {\n const batchMetadata: MutationMetadataSchema = {\n state: this.state,\n updateTimeMs: Date.now() // Modify the existing value to trigger update.\n };\n\n if (this.error) {\n batchMetadata.error = {\n code: this.error.code,\n message: this.error.message\n };\n }\n\n return JSON.stringify(batchMetadata);\n }\n}\n\n/**\n * Holds the state of a query target, including its target ID and whether the\n * target is 'not-current', 'current' or 'rejected'.\n */\n// Visible for testing\nexport class QueryTargetMetadata {\n constructor(\n readonly targetId: TargetId,\n readonly state: QueryTargetState,\n readonly error?: FirestoreError\n ) {\n debugAssert(\n (error !== undefined) === (state === 'rejected'),\n `QueryTargetMetadata must contain an error iff state is 'rejected'`\n );\n }\n\n /**\n * Parses a QueryTargetMetadata from its JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static fromWebStorageEntry(\n targetId: TargetId,\n value: string\n ): QueryTargetMetadata | null {\n const targetState = JSON.parse(value) as QueryTargetStateSchema;\n\n let validData =\n typeof targetState === 'object' &&\n ['not-current', 'current', 'rejected'].indexOf(targetState.state) !==\n -1 &&\n (targetState.error === undefined ||\n typeof targetState.error === 'object');\n\n let firestoreError: FirestoreError | undefined = undefined;\n\n if (validData && targetState.error) {\n validData =\n typeof targetState.error.message === 'string' &&\n typeof targetState.error.code === 'string';\n if (validData) {\n firestoreError = new FirestoreError(\n targetState.error.code as Code,\n targetState.error.message\n );\n }\n }\n\n if (validData) {\n return new QueryTargetMetadata(\n targetId,\n targetState.state,\n firestoreError\n );\n } else {\n logError(\n LOG_TAG,\n `Failed to parse target state for ID '${targetId}': ${value}`\n );\n return null;\n }\n }\n\n toWebStorageJSON(): string {\n const targetState: QueryTargetStateSchema = {\n state: this.state,\n updateTimeMs: Date.now() // Modify the existing value to trigger update.\n };\n\n if (this.error) {\n targetState.error = {\n code: this.error.code,\n message: this.error.message\n };\n }\n\n return JSON.stringify(targetState);\n }\n}\n\n/**\n * Metadata state of a single client denoting the query targets it is actively\n * listening to.\n */\n// Visible for testing.\nexport interface ClientState {\n readonly activeTargetIds: TargetIdSet;\n}\n\n/**\n * This class represents the immutable ClientState for a client read from\n * WebStorage, containing the list of active query targets.\n */\nclass RemoteClientState implements ClientState {\n private constructor(\n readonly clientId: ClientId,\n readonly activeTargetIds: TargetIdSet\n ) {}\n\n /**\n * Parses a RemoteClientState from the JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static fromWebStorageEntry(\n clientId: ClientId,\n value: string\n ): RemoteClientState | null {\n const clientState = JSON.parse(value) as ClientStateSchema;\n\n let validData =\n typeof clientState === 'object' &&\n clientState.activeTargetIds instanceof Array;\n\n let activeTargetIdsSet = targetIdSet();\n\n for (let i = 0; validData && i < clientState.activeTargetIds.length; ++i) {\n validData = isSafeInteger(clientState.activeTargetIds[i]);\n activeTargetIdsSet = activeTargetIdsSet.add(\n clientState.activeTargetIds[i]\n );\n }\n\n if (validData) {\n return new RemoteClientState(clientId, 
activeTargetIdsSet);\n } else {\n logError(\n LOG_TAG,\n `Failed to parse client data for instance '${clientId}': ${value}`\n );\n return null;\n }\n }\n}\n\n/**\n * This class represents the online state for all clients participating in\n * multi-tab. The online state is only written to by the primary client, and\n * used in secondary clients to update their query views.\n */\nexport class SharedOnlineState {\n constructor(readonly clientId: string, readonly onlineState: OnlineState) {}\n\n /**\n * Parses a SharedOnlineState from its JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static fromWebStorageEntry(value: string): SharedOnlineState | null {\n const onlineState = JSON.parse(value) as SharedOnlineStateSchema;\n\n const validData =\n typeof onlineState === 'object' &&\n ['Unknown', 'Online', 'Offline'].indexOf(onlineState.onlineState) !==\n -1 &&\n typeof onlineState.clientId === 'string';\n\n if (validData) {\n return new SharedOnlineState(\n onlineState.clientId,\n onlineState.onlineState as OnlineState\n );\n } else {\n logError(LOG_TAG, `Failed to parse online state: ${value}`);\n return null;\n }\n }\n}\n\n/**\n * Metadata state of the local client. Unlike `RemoteClientState`, this class is\n * mutable and keeps track of all pending mutations, which allows us to\n * update the range of pending mutation batch IDs as new mutations are added or\n * removed.\n *\n * The data in `LocalClientState` is not read from WebStorage and instead\n * updated via its instance methods. The updated state can be serialized via\n * `toWebStorageJSON()`.\n */\n// Visible for testing.\nexport class LocalClientState implements ClientState {\n activeTargetIds = targetIdSet();\n\n addQueryTarget(targetId: TargetId): void {\n this.activeTargetIds = this.activeTargetIds.add(targetId);\n }\n\n removeQueryTarget(targetId: TargetId): void {\n this.activeTargetIds = this.activeTargetIds.delete(targetId);\n }\n\n /**\n * Converts this entry into a JSON-encoded format we can use for WebStorage.\n * Does not encode `clientId` as it is part of the key in WebStorage.\n */\n toWebStorageJSON(): string {\n const data: ClientStateSchema = {\n activeTargetIds: this.activeTargetIds.toArray(),\n updateTimeMs: Date.now() // Modify the existing value to trigger update.\n };\n return JSON.stringify(data);\n }\n}\n\n/**\n * `WebStorageSharedClientState` uses WebStorage (window.localStorage) as the\n * backing store for the SharedClientState. It keeps track of all active\n * clients and supports modifications of the local client's data.\n */\nexport class WebStorageSharedClientState implements SharedClientState {\n syncEngine: SharedClientStateSyncer | null = null;\n onlineStateHandler: ((onlineState: OnlineState) => void) | null = null;\n sequenceNumberHandler:\n | ((sequenceNumber: ListenSequenceNumber) => void)\n | null = null;\n\n private readonly storage: Storage;\n private readonly localClientStorageKey: string;\n private readonly sequenceNumberKey: string;\n private readonly storageListener = this.handleWebStorageEvent.bind(this);\n private readonly onlineStateKey: string;\n private readonly clientStateKeyRe: RegExp;\n private readonly mutationBatchKeyRe: RegExp;\n private readonly queryTargetKeyRe: RegExp;\n private activeClients = new SortedMap<string, ClientState>(\n primitiveComparator\n );\n private started = false;\n private currentUser: User;\n\n /**\n * Captures WebStorage events that occur before `start()` is called. 
These\n * events are replayed once `WebStorageSharedClientState` is started.\n */\n private earlyEvents: StorageEvent[] = [];\n\n constructor(\n private readonly queue: AsyncQueue,\n private readonly platform: Platform,\n private readonly persistenceKey: string,\n private readonly localClientId: ClientId,\n initialUser: User\n ) {\n if (!WebStorageSharedClientState.isAvailable(this.platform)) {\n throw new FirestoreError(\n Code.UNIMPLEMENTED,\n 'LocalStorage is not available on this platform.'\n );\n }\n // Escape the special characters mentioned here:\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions\n const escapedPersistenceKey = persistenceKey.replace(\n /[.*+?^${}()|[\\]\\\\]/g,\n '\\\\$&'\n );\n\n this.storage = this.platform.window!.localStorage;\n this.currentUser = initialUser;\n this.localClientStorageKey = createWebStorageClientStateKey(\n this.persistenceKey,\n this.localClientId\n );\n this.sequenceNumberKey = createWebStorageSequenceNumberKey(\n this.persistenceKey\n );\n this.activeClients = this.activeClients.insert(\n this.localClientId,\n new LocalClientState()\n );\n\n this.clientStateKeyRe = new RegExp(\n `^${CLIENT_STATE_KEY_PREFIX}_${escapedPersistenceKey}_([^_]*)$`\n );\n this.mutationBatchKeyRe = new RegExp(\n `^${MUTATION_BATCH_KEY_PREFIX}_${escapedPersistenceKey}_(\\\\d+)(?:_(.*))?$`\n );\n this.queryTargetKeyRe = new RegExp(\n `^${QUERY_TARGET_KEY_PREFIX}_${escapedPersistenceKey}_(\\\\d+)$`\n );\n\n this.onlineStateKey = createWebStorageOnlineStateKey(this.persistenceKey);\n\n // Rather than adding the storage observer during start(), we add the\n // storage observer during initialization. This ensures that we collect\n // events before other components populate their initial state (during their\n // respective start() calls). Otherwise, we might for example miss a\n // mutation that is added after LocalStore's start() processed the existing\n // mutations but before we observe WebStorage events.\n this.platform.window!.addEventListener('storage', this.storageListener);\n }\n\n /** Returns 'true' if WebStorage is available in the current environment. 
*/\n static isAvailable(platform: Platform): boolean {\n return !!(platform.window && platform.window.localStorage != null);\n }\n\n async start(): Promise<void> {\n debugAssert(!this.started, 'WebStorageSharedClientState already started');\n debugAssert(\n this.syncEngine !== null,\n 'syncEngine property must be set before calling start()'\n );\n debugAssert(\n this.onlineStateHandler !== null,\n 'onlineStateHandler property must be set before calling start()'\n );\n\n // Retrieve the list of existing clients to backfill the data in\n // SharedClientState.\n const existingClients = await this.syncEngine!.getActiveClients();\n\n for (const clientId of existingClients) {\n if (clientId === this.localClientId) {\n continue;\n }\n\n const storageItem = this.getItem(\n createWebStorageClientStateKey(this.persistenceKey, clientId)\n );\n if (storageItem) {\n const clientState = RemoteClientState.fromWebStorageEntry(\n clientId,\n storageItem\n );\n if (clientState) {\n this.activeClients = this.activeClients.insert(\n clientState.clientId,\n clientState\n );\n }\n }\n }\n\n this.persistClientState();\n\n // Check if there is an existing online state and call the callback handler\n // if applicable.\n const onlineStateJSON = this.storage.getItem(this.onlineStateKey);\n if (onlineStateJSON) {\n const onlineState = this.fromWebStorageOnlineState(onlineStateJSON);\n if (onlineState) {\n this.handleOnlineStateEvent(onlineState);\n }\n }\n\n for (const event of this.earlyEvents) {\n this.handleWebStorageEvent(event);\n }\n\n this.earlyEvents = [];\n\n // Register a window unload hook to remove the client metadata entry from\n // WebStorage even if `shutdown()` was not called.\n this.platform.window!.addEventListener('unload', () => this.shutdown());\n\n this.started = true;\n }\n\n writeSequenceNumber(sequenceNumber: ListenSequenceNumber): void {\n this.setItem(this.sequenceNumberKey, JSON.stringify(sequenceNumber));\n }\n\n getAllActiveQueryTargets(): TargetIdSet {\n return this.extractActiveQueryTargets(this.activeClients);\n }\n\n isActiveQueryTarget(targetId: TargetId): boolean {\n let found = false;\n this.activeClients.forEach((key, value) => {\n if (value.activeTargetIds.has(targetId)) {\n found = true;\n }\n });\n return found;\n }\n\n addPendingMutation(batchId: BatchId): void {\n this.persistMutationState(batchId, 'pending');\n }\n\n updateMutationState(\n batchId: BatchId,\n state: 'acknowledged' | 'rejected',\n error?: FirestoreError\n ): void {\n this.persistMutationState(batchId, state, error);\n\n // Once a final mutation result is observed by other clients, they no longer\n // access the mutation's metadata entry. 
Since WebStorage replays events\n // in order, it is safe to delete the entry right after updating it.\n this.removeMutationState(batchId);\n }\n\n addLocalQueryTarget(targetId: TargetId): QueryTargetState {\n let queryState: QueryTargetState = 'not-current';\n\n // Lookup an existing query state if the target ID was already registered\n // by another tab\n if (this.isActiveQueryTarget(targetId)) {\n const storageItem = this.storage.getItem(\n createWebStorageQueryTargetMetadataKey(this.persistenceKey, targetId)\n );\n\n if (storageItem) {\n const metadata = QueryTargetMetadata.fromWebStorageEntry(\n targetId,\n storageItem\n );\n if (metadata) {\n queryState = metadata.state;\n }\n }\n }\n\n this.localClientState.addQueryTarget(targetId);\n this.persistClientState();\n\n return queryState;\n }\n\n removeLocalQueryTarget(targetId: TargetId): void {\n this.localClientState.removeQueryTarget(targetId);\n this.persistClientState();\n }\n\n isLocalQueryTarget(targetId: TargetId): boolean {\n return this.localClientState.activeTargetIds.has(targetId);\n }\n\n clearQueryState(targetId: TargetId): void {\n this.removeItem(\n createWebStorageQueryTargetMetadataKey(this.persistenceKey, targetId)\n );\n }\n\n updateQueryState(\n targetId: TargetId,\n state: QueryTargetState,\n error?: FirestoreError\n ): void {\n this.persistQueryTargetState(targetId, state, error);\n }\n\n handleUserChange(\n user: User,\n removedBatchIds: BatchId[],\n addedBatchIds: BatchId[]\n ): void {\n removedBatchIds.forEach(batchId => {\n this.removeMutationState(batchId);\n });\n this.currentUser = user;\n addedBatchIds.forEach(batchId => {\n this.addPendingMutation(batchId);\n });\n }\n\n setOnlineState(onlineState: OnlineState): void {\n this.persistOnlineState(onlineState);\n }\n\n shutdown(): void {\n if (this.started) {\n this.platform.window!.removeEventListener(\n 'storage',\n this.storageListener\n );\n this.removeItem(this.localClientStorageKey);\n this.started = false;\n }\n }\n\n private getItem(key: string): string | null {\n const value = this.storage.getItem(key);\n logDebug(LOG_TAG, 'READ', key, value);\n return value;\n }\n\n private setItem(key: string, value: string): void {\n logDebug(LOG_TAG, 'SET', key, value);\n this.storage.setItem(key, value);\n }\n\n private removeItem(key: string): void {\n logDebug(LOG_TAG, 'REMOVE', key);\n this.storage.removeItem(key);\n }\n\n private handleWebStorageEvent(event: StorageEvent): void {\n if (event.storageArea === this.storage) {\n logDebug(LOG_TAG, 'EVENT', event.key, event.newValue);\n\n if (event.key === this.localClientStorageKey) {\n logError(\n 'Received WebStorage notification for local change. 
Another client might have ' +\n 'garbage-collected our state'\n );\n return;\n }\n\n this.queue.enqueueRetryable(async () => {\n if (!this.started) {\n this.earlyEvents.push(event);\n return;\n }\n\n if (event.key === null) {\n return;\n }\n\n if (this.clientStateKeyRe.test(event.key)) {\n if (event.newValue != null) {\n const clientState = this.fromWebStorageClientState(\n event.key,\n event.newValue\n );\n if (clientState) {\n return this.handleClientStateEvent(\n clientState.clientId,\n clientState\n );\n }\n } else {\n const clientId = this.fromWebStorageClientStateKey(event.key)!;\n return this.handleClientStateEvent(clientId, null);\n }\n } else if (this.mutationBatchKeyRe.test(event.key)) {\n if (event.newValue !== null) {\n const mutationMetadata = this.fromWebStorageMutationMetadata(\n event.key,\n event.newValue\n );\n if (mutationMetadata) {\n return this.handleMutationBatchEvent(mutationMetadata);\n }\n }\n } else if (this.queryTargetKeyRe.test(event.key)) {\n if (event.newValue !== null) {\n const queryTargetMetadata = this.fromWebStorageQueryTargetMetadata(\n event.key,\n event.newValue\n );\n if (queryTargetMetadata) {\n return this.handleQueryTargetEvent(queryTargetMetadata);\n }\n }\n } else if (event.key === this.onlineStateKey) {\n if (event.newValue !== null) {\n const onlineState = this.fromWebStorageOnlineState(event.newValue);\n if (onlineState) {\n return this.handleOnlineStateEvent(onlineState);\n }\n }\n } else if (event.key === this.sequenceNumberKey) {\n debugAssert(\n !!this.sequenceNumberHandler,\n 'Missing sequenceNumberHandler'\n );\n const sequenceNumber = fromWebStorageSequenceNumber(event.newValue);\n if (sequenceNumber !== ListenSequence.INVALID) {\n this.sequenceNumberHandler!(sequenceNumber);\n }\n }\n });\n }\n }\n\n private get localClientState(): LocalClientState {\n return this.activeClients.get(this.localClientId) as LocalClientState;\n }\n\n private persistClientState(): void {\n this.setItem(\n this.localClientStorageKey,\n this.localClientState.toWebStorageJSON()\n );\n }\n\n private persistMutationState(\n batchId: BatchId,\n state: MutationBatchState,\n error?: FirestoreError\n ): void {\n const mutationState = new MutationMetadata(\n this.currentUser,\n batchId,\n state,\n error\n );\n const mutationKey = createWebStorageMutationBatchKey(\n this.persistenceKey,\n this.currentUser,\n batchId\n );\n this.setItem(mutationKey, mutationState.toWebStorageJSON());\n }\n\n private removeMutationState(batchId: BatchId): void {\n const mutationKey = createWebStorageMutationBatchKey(\n this.persistenceKey,\n this.currentUser,\n batchId\n );\n this.removeItem(mutationKey);\n }\n\n private persistOnlineState(onlineState: OnlineState): void {\n const entry: SharedOnlineStateSchema = {\n clientId: this.localClientId,\n onlineState\n };\n this.storage.setItem(this.onlineStateKey, JSON.stringify(entry));\n }\n\n private persistQueryTargetState(\n targetId: TargetId,\n state: QueryTargetState,\n error?: FirestoreError\n ): void {\n const targetKey = createWebStorageQueryTargetMetadataKey(\n this.persistenceKey,\n targetId\n );\n const targetMetadata = new QueryTargetMetadata(targetId, state, error);\n this.setItem(targetKey, targetMetadata.toWebStorageJSON());\n }\n\n /**\n * Parses a client state key in WebStorage. Returns null if the key does not\n * match the expected key format.\n */\n private fromWebStorageClientStateKey(key: string): ClientId | null {\n const match = this.clientStateKeyRe.exec(key);\n return match ? 
match[1] : null;\n }\n\n /**\n * Parses a client state in WebStorage. Returns 'null' if the value could not\n * be parsed.\n */\n private fromWebStorageClientState(\n key: string,\n value: string\n ): RemoteClientState | null {\n const clientId = this.fromWebStorageClientStateKey(key);\n debugAssert(clientId !== null, `Cannot parse client state key '${key}'`);\n return RemoteClientState.fromWebStorageEntry(clientId, value);\n }\n\n /**\n * Parses a mutation batch state in WebStorage. Returns 'null' if the value\n * could not be parsed.\n */\n private fromWebStorageMutationMetadata(\n key: string,\n value: string\n ): MutationMetadata | null {\n const match = this.mutationBatchKeyRe.exec(key);\n debugAssert(match !== null, `Cannot parse mutation batch key '${key}'`);\n\n const batchId = Number(match[1]);\n const userId = match[2] !== undefined ? match[2] : null;\n return MutationMetadata.fromWebStorageEntry(\n new User(userId),\n batchId,\n value\n );\n }\n\n /**\n * Parses a query target state from WebStorage. Returns 'null' if the value\n * could not be parsed.\n */\n private fromWebStorageQueryTargetMetadata(\n key: string,\n value: string\n ): QueryTargetMetadata | null {\n const match = this.queryTargetKeyRe.exec(key);\n debugAssert(match !== null, `Cannot parse query target key '${key}'`);\n\n const targetId = Number(match[1]);\n return QueryTargetMetadata.fromWebStorageEntry(targetId, value);\n }\n\n /**\n * Parses an online state from WebStorage. Returns 'null' if the value\n * could not be parsed.\n */\n private fromWebStorageOnlineState(value: string): SharedOnlineState | null {\n return SharedOnlineState.fromWebStorageEntry(value);\n }\n\n private async handleMutationBatchEvent(\n mutationBatch: MutationMetadata\n ): Promise<void> {\n if (mutationBatch.user.uid !== this.currentUser.uid) {\n logDebug(\n LOG_TAG,\n `Ignoring mutation for non-active user ${mutationBatch.user.uid}`\n );\n return;\n }\n\n return this.syncEngine!.applyBatchState(\n mutationBatch.batchId,\n mutationBatch.state,\n mutationBatch.error\n );\n }\n\n private handleQueryTargetEvent(\n targetMetadata: QueryTargetMetadata\n ): Promise<void> {\n return this.syncEngine!.applyTargetState(\n targetMetadata.targetId,\n targetMetadata.state,\n targetMetadata.error\n );\n }\n\n private handleClientStateEvent(\n clientId: ClientId,\n clientState: RemoteClientState | null\n ): Promise<void> {\n const updatedClients = clientState\n ? this.activeClients.insert(clientId, clientState)\n : this.activeClients.remove(clientId);\n\n const existingTargets = this.extractActiveQueryTargets(this.activeClients);\n const newTargets = this.extractActiveQueryTargets(updatedClients);\n\n const addedTargets: TargetId[] = [];\n const removedTargets: TargetId[] = [];\n\n newTargets.forEach(targetId => {\n if (!existingTargets.has(targetId)) {\n addedTargets.push(targetId);\n }\n });\n\n existingTargets.forEach(targetId => {\n if (!newTargets.has(targetId)) {\n removedTargets.push(targetId);\n }\n });\n\n return this.syncEngine!.applyActiveTargetsChange(\n addedTargets,\n removedTargets\n ).then(() => {\n this.activeClients = updatedClients;\n });\n }\n\n private handleOnlineStateEvent(onlineState: SharedOnlineState): void {\n // We check whether the client that wrote this online state is still active\n // by comparing its client ID to the list of clients kept active in\n // IndexedDb. 
If a client does not update their IndexedDb client state\n // within 5 seconds, it is considered inactive and we don't emit an online\n // state event.\n if (this.activeClients.get(onlineState.clientId)) {\n this.onlineStateHandler!(onlineState.onlineState);\n }\n }\n\n private extractActiveQueryTargets(\n clients: SortedMap<string, ClientState>\n ): SortedSet<TargetId> {\n let activeTargets = targetIdSet();\n clients.forEach((kev, value) => {\n activeTargets = activeTargets.unionWith(value.activeTargetIds);\n });\n return activeTargets;\n }\n}\n\nfunction fromWebStorageSequenceNumber(\n seqString: string | null\n): ListenSequenceNumber {\n let sequenceNumber = ListenSequence.INVALID;\n if (seqString != null) {\n try {\n const parsed = JSON.parse(seqString);\n hardAssert(\n typeof parsed === 'number',\n 'Found non-numeric sequence number'\n );\n sequenceNumber = parsed;\n } catch (e) {\n logError(LOG_TAG, 'Failed to read sequence number from WebStorage', e);\n }\n }\n return sequenceNumber;\n}\n\n/**\n * `MemorySharedClientState` is a simple implementation of SharedClientState for\n * clients using memory persistence. The state in this class remains fully\n * isolated and no synchronization is performed.\n */\nexport class MemorySharedClientState implements SharedClientState {\n private localState = new LocalClientState();\n private queryState: { [targetId: number]: QueryTargetState } = {};\n\n syncEngine: SharedClientStateSyncer | null = null;\n onlineStateHandler: ((onlineState: OnlineState) => void) | null = null;\n sequenceNumberHandler:\n | ((sequenceNumber: ListenSequenceNumber) => void)\n | null = null;\n\n addPendingMutation(batchId: BatchId): void {\n // No op.\n }\n\n updateMutationState(\n batchId: BatchId,\n state: 'acknowledged' | 'rejected',\n error?: FirestoreError\n ): void {\n // No op.\n }\n\n addLocalQueryTarget(targetId: TargetId): QueryTargetState {\n this.localState.addQueryTarget(targetId);\n return this.queryState[targetId] || 'not-current';\n }\n\n updateQueryState(\n targetId: TargetId,\n state: QueryTargetState,\n error?: FirestoreError\n ): void {\n this.queryState[targetId] = state;\n }\n\n removeLocalQueryTarget(targetId: TargetId): void {\n this.localState.removeQueryTarget(targetId);\n }\n\n isLocalQueryTarget(targetId: TargetId): boolean {\n return this.localState.activeTargetIds.has(targetId);\n }\n\n clearQueryState(targetId: TargetId): void {\n delete this.queryState[targetId];\n }\n\n getAllActiveQueryTargets(): TargetIdSet {\n return this.localState.activeTargetIds;\n }\n\n isActiveQueryTarget(targetId: TargetId): boolean {\n return this.localState.activeTargetIds.has(targetId);\n }\n\n start(): Promise<void> {\n this.localState = new LocalClientState();\n return Promise.resolve();\n }\n\n handleUserChange(\n user: User,\n removedBatchIds: BatchId[],\n addedBatchIds: BatchId[]\n ): void {\n // No op.\n }\n\n setOnlineState(onlineState: OnlineState): void {\n // No op.\n }\n\n shutdown(): void {}\n\n writeSequenceNumber(sequenceNumber: ListenSequenceNumber): void {}\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { QueryResult } from '../local/local_store';\nimport {\n documentKeySet,\n DocumentKeySet,\n MaybeDocumentMap\n} from '../model/collections';\nimport { Document, MaybeDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { DocumentSet } from '../model/document_set';\nimport { TargetChange } from '../remote/remote_event';\nimport { debugAssert, fail } from '../util/assert';\n\nimport { Query } from './query';\nimport { OnlineState } from './types';\nimport {\n ChangeType,\n DocumentChangeSet,\n SyncState,\n ViewSnapshot\n} from './view_snapshot';\n\nexport type LimboDocumentChange = AddedLimboDocument | RemovedLimboDocument;\nexport class AddedLimboDocument {\n constructor(public key: DocumentKey) {}\n}\nexport class RemovedLimboDocument {\n constructor(public key: DocumentKey) {}\n}\n\n/** The result of applying a set of doc changes to a view. */\nexport interface ViewDocumentChanges {\n /** The new set of docs that should be in the view. */\n documentSet: DocumentSet;\n /** The diff of these docs with the previous set of docs. */\n changeSet: DocumentChangeSet;\n /**\n * Whether the set of documents passed in was not sufficient to calculate the\n * new state of the view and there needs to be another pass based on the\n * local cache.\n */\n needsRefill: boolean;\n\n mutatedKeys: DocumentKeySet;\n}\n\nexport interface ViewChange {\n snapshot?: ViewSnapshot;\n limboChanges: LimboDocumentChange[];\n}\n\n/**\n * View is responsible for computing the final merged truth of what docs are in\n * a query. It gets notified of local and remote changes to docs, and applies\n * the query filters and limits to determine the most correct possible results.\n */\nexport class View {\n private syncState: SyncState | null = null;\n /**\n * A flag whether the view is current with the backend. A view is considered\n * current after it has seen the current flag from the backend and did not\n * lose consistency within the watch stream (e.g. because of an existence\n * filter mismatch).\n */\n private current = false;\n private documentSet: DocumentSet;\n /** Documents in the view but not in the remote target */\n private limboDocuments = documentKeySet();\n /** Document Keys that have local changes */\n private mutatedKeys = documentKeySet();\n\n constructor(\n private query: Query,\n /** Documents included in the remote target */\n private _syncedDocuments: DocumentKeySet\n ) {\n this.documentSet = new DocumentSet(query.docComparator.bind(query));\n }\n\n /**\n * The set of remote documents that the server has told us belongs to the target associated with\n * this view.\n */\n get syncedDocuments(): DocumentKeySet {\n return this._syncedDocuments;\n }\n\n /**\n * Iterates over a set of doc changes, applies the query limit, and computes\n * what the new results should be, what the changes were, and whether we may\n * need to go back to the local cache for more results. 
Does not make any\n * changes to the view.\n * @param docChanges The doc changes to apply to this view.\n * @param previousChanges If this is being called with a refill, then start\n * with this set of docs and changes instead of the current view.\n * @return a new set of docs, changes, and refill flag.\n */\n computeDocChanges(\n docChanges: MaybeDocumentMap,\n previousChanges?: ViewDocumentChanges\n ): ViewDocumentChanges {\n const changeSet = previousChanges\n ? previousChanges.changeSet\n : new DocumentChangeSet();\n const oldDocumentSet = previousChanges\n ? previousChanges.documentSet\n : this.documentSet;\n let newMutatedKeys = previousChanges\n ? previousChanges.mutatedKeys\n : this.mutatedKeys;\n let newDocumentSet = oldDocumentSet;\n let needsRefill = false;\n\n // Track the last doc in a (full) limit. This is necessary, because some\n // update (a delete, or an update moving a doc past the old limit) might\n // mean there is some other document in the local cache that either should\n // come (1) between the old last limit doc and the new last document, in the\n // case of updates, or (2) after the new last document, in the case of\n // deletes. So we keep this doc at the old limit to compare the updates to.\n //\n // Note that this should never get used in a refill (when previousChanges is\n // set), because there will only be adds -- no deletes or updates.\n const lastDocInLimit =\n this.query.hasLimitToFirst() && oldDocumentSet.size === this.query.limit\n ? oldDocumentSet.last()\n : null;\n const firstDocInLimit =\n this.query.hasLimitToLast() && oldDocumentSet.size === this.query.limit\n ? oldDocumentSet.first()\n : null;\n\n docChanges.inorderTraversal(\n (key: DocumentKey, newMaybeDoc: MaybeDocument) => {\n const oldDoc = oldDocumentSet.get(key);\n let newDoc = newMaybeDoc instanceof Document ? newMaybeDoc : null;\n if (newDoc) {\n debugAssert(\n key.isEqual(newDoc.key),\n 'Mismatching keys found in document changes: ' +\n key +\n ' != ' +\n newDoc.key\n );\n newDoc = this.query.matches(newDoc) ? newDoc : null;\n }\n\n const oldDocHadPendingMutations = oldDoc\n ? this.mutatedKeys.has(oldDoc.key)\n : false;\n const newDocHasPendingMutations = newDoc\n ? 
newDoc.hasLocalMutations ||\n // We only consider committed mutations for documents that were\n // mutated during the lifetime of the view.\n (this.mutatedKeys.has(newDoc.key) && newDoc.hasCommittedMutations)\n : false;\n\n let changeApplied = false;\n\n // Calculate change\n if (oldDoc && newDoc) {\n const docsEqual = oldDoc.data().isEqual(newDoc.data());\n if (!docsEqual) {\n if (!this.shouldWaitForSyncedDocument(oldDoc, newDoc)) {\n changeSet.track({\n type: ChangeType.Modified,\n doc: newDoc\n });\n changeApplied = true;\n\n if (\n (lastDocInLimit &&\n this.query.docComparator(newDoc, lastDocInLimit) > 0) ||\n (firstDocInLimit &&\n this.query.docComparator(newDoc, firstDocInLimit) < 0)\n ) {\n // This doc moved from inside the limit to outside the limit.\n // That means there may be some other doc in the local cache\n // that should be included instead.\n needsRefill = true;\n }\n }\n } else if (oldDocHadPendingMutations !== newDocHasPendingMutations) {\n changeSet.track({ type: ChangeType.Metadata, doc: newDoc });\n changeApplied = true;\n }\n } else if (!oldDoc && newDoc) {\n changeSet.track({ type: ChangeType.Added, doc: newDoc });\n changeApplied = true;\n } else if (oldDoc && !newDoc) {\n changeSet.track({ type: ChangeType.Removed, doc: oldDoc });\n changeApplied = true;\n\n if (lastDocInLimit || firstDocInLimit) {\n // A doc was removed from a full limit query. We'll need to\n // requery from the local cache to see if we know about some other\n // doc that should be in the results.\n needsRefill = true;\n }\n }\n\n if (changeApplied) {\n if (newDoc) {\n newDocumentSet = newDocumentSet.add(newDoc);\n if (newDocHasPendingMutations) {\n newMutatedKeys = newMutatedKeys.add(key);\n } else {\n newMutatedKeys = newMutatedKeys.delete(key);\n }\n } else {\n newDocumentSet = newDocumentSet.delete(key);\n newMutatedKeys = newMutatedKeys.delete(key);\n }\n }\n }\n );\n\n // Drop documents out to meet limit/limitToLast requirement.\n if (this.query.hasLimitToFirst() || this.query.hasLimitToLast()) {\n while (newDocumentSet.size > this.query.limit!) {\n const oldDoc = this.query.hasLimitToFirst()\n ? newDocumentSet.last()\n : newDocumentSet.first();\n newDocumentSet = newDocumentSet.delete(oldDoc!.key);\n newMutatedKeys = newMutatedKeys.delete(oldDoc!.key);\n changeSet.track({ type: ChangeType.Removed, doc: oldDoc! });\n }\n }\n\n debugAssert(\n !needsRefill || !previousChanges,\n 'View was refilled using docs that themselves needed refilling.'\n );\n return {\n documentSet: newDocumentSet,\n changeSet,\n needsRefill,\n mutatedKeys: newMutatedKeys\n };\n }\n\n private shouldWaitForSyncedDocument(\n oldDoc: Document,\n newDoc: Document\n ): boolean {\n // We suppress the initial change event for documents that were modified as\n // part of a write acknowledgment (e.g. 
when the value of a server transform\n // is applied) as Watch will send us the same document again.\n // By suppressing the event, we only raise two user visible events (one with\n // `hasPendingWrites` and the final state of the document) instead of three\n // (one with `hasPendingWrites`, the modified document with\n // `hasPendingWrites` and the final state of the document).\n return (\n oldDoc.hasLocalMutations &&\n newDoc.hasCommittedMutations &&\n !newDoc.hasLocalMutations\n );\n }\n\n /**\n * Updates the view with the given ViewDocumentChanges and optionally updates\n * limbo docs and sync state from the provided target change.\n * @param docChanges The set of changes to make to the view's docs.\n * @param updateLimboDocuments Whether to update limbo documents based on this\n * change.\n * @param targetChange A target change to apply for computing limbo docs and\n * sync state.\n * @return A new ViewChange with the given docs, changes, and sync state.\n */\n // PORTING NOTE: The iOS/Android clients always compute limbo document changes.\n applyChanges(\n docChanges: ViewDocumentChanges,\n updateLimboDocuments: boolean,\n targetChange?: TargetChange\n ): ViewChange {\n debugAssert(\n !docChanges.needsRefill,\n 'Cannot apply changes that need a refill'\n );\n const oldDocs = this.documentSet;\n this.documentSet = docChanges.documentSet;\n this.mutatedKeys = docChanges.mutatedKeys;\n // Sort changes based on type and query comparator\n const changes = docChanges.changeSet.getChanges();\n changes.sort((c1, c2) => {\n return (\n compareChangeType(c1.type, c2.type) ||\n this.query.docComparator(c1.doc, c2.doc)\n );\n });\n\n this.applyTargetChange(targetChange);\n const limboChanges = updateLimboDocuments\n ? this.updateLimboDocuments()\n : [];\n const synced = this.limboDocuments.size === 0 && this.current;\n const newSyncState = synced ? SyncState.Synced : SyncState.Local;\n const syncStateChanged = newSyncState !== this.syncState;\n this.syncState = newSyncState;\n\n if (changes.length === 0 && !syncStateChanged) {\n // no changes\n return { limboChanges };\n } else {\n const snap: ViewSnapshot = new ViewSnapshot(\n this.query,\n docChanges.documentSet,\n oldDocs,\n changes,\n docChanges.mutatedKeys,\n newSyncState === SyncState.Local,\n syncStateChanged,\n /* excludesMetadataChanges= */ false\n );\n return {\n snapshot: snap,\n limboChanges\n };\n }\n }\n\n /**\n * Applies an OnlineState change to the view, potentially generating a\n * ViewChange if the view's syncState changes as a result.\n */\n applyOnlineStateChange(onlineState: OnlineState): ViewChange {\n if (this.current && onlineState === OnlineState.Offline) {\n // If we're offline, set `current` to false and then call applyChanges()\n // to refresh our syncState and generate a ViewChange as appropriate. 
We\n // are guaranteed to get a new TargetChange that sets `current` back to\n // true once the client is back online.\n this.current = false;\n return this.applyChanges(\n {\n documentSet: this.documentSet,\n changeSet: new DocumentChangeSet(),\n mutatedKeys: this.mutatedKeys,\n needsRefill: false\n },\n /* updateLimboDocuments= */ false\n );\n } else {\n // No effect, just return a no-op ViewChange.\n return { limboChanges: [] };\n }\n }\n\n /**\n * Returns whether the doc for the given key should be in limbo.\n */\n private shouldBeInLimbo(key: DocumentKey): boolean {\n // If the remote end says it's part of this query, it's not in limbo.\n if (this._syncedDocuments.has(key)) {\n return false;\n }\n // The local store doesn't think it's a result, so it shouldn't be in limbo.\n if (!this.documentSet.has(key)) {\n return false;\n }\n // If there are local changes to the doc, they might explain why the server\n // doesn't know that it's part of the query. So don't put it in limbo.\n // TODO(klimt): Ideally, we would only consider changes that might actually\n // affect this specific query.\n if (this.documentSet.get(key)!.hasLocalMutations) {\n return false;\n }\n // Everything else is in limbo.\n return true;\n }\n\n /**\n * Updates syncedDocuments, current, and limbo docs based on the given change.\n * Returns the list of changes to which docs are in limbo.\n */\n private applyTargetChange(targetChange?: TargetChange): void {\n if (targetChange) {\n targetChange.addedDocuments.forEach(\n key => (this._syncedDocuments = this._syncedDocuments.add(key))\n );\n targetChange.modifiedDocuments.forEach(key => {\n debugAssert(\n this._syncedDocuments.has(key),\n `Modified document ${key} not found in view.`\n );\n });\n targetChange.removedDocuments.forEach(\n key => (this._syncedDocuments = this._syncedDocuments.delete(key))\n );\n this.current = targetChange.current;\n }\n }\n\n private updateLimboDocuments(): LimboDocumentChange[] {\n // We can only determine limbo documents when we're in-sync with the server.\n if (!this.current) {\n return [];\n }\n\n // TODO(klimt): Do this incrementally so that it's not quadratic when\n // updating many documents.\n const oldLimboDocuments = this.limboDocuments;\n this.limboDocuments = documentKeySet();\n this.documentSet.forEach(doc => {\n if (this.shouldBeInLimbo(doc.key)) {\n this.limboDocuments = this.limboDocuments.add(doc.key);\n }\n });\n\n // Diff the new limbo docs with the old limbo docs.\n const changes: LimboDocumentChange[] = [];\n oldLimboDocuments.forEach(key => {\n if (!this.limboDocuments.has(key)) {\n changes.push(new RemovedLimboDocument(key));\n }\n });\n this.limboDocuments.forEach(key => {\n if (!oldLimboDocuments.has(key)) {\n changes.push(new AddedLimboDocument(key));\n }\n });\n return changes;\n }\n\n /**\n * Update the in-memory state of the current view with the state read from\n * persistence.\n *\n * We update the query view whenever a client's primary status changes:\n * - When a client transitions from primary to secondary, it can miss\n * LocalStorage updates and its query views may temporarily not be\n * synchronized with the state on disk.\n * - For secondary to primary transitions, the client needs to update the list\n * of `syncedDocuments` since secondary clients update their query views\n * based purely on synthesized RemoteEvents.\n *\n * @param queryResult.documents - The documents that match the query according\n * to the LocalStore.\n * @param queryResult.remoteKeys - The keys of the documents that match the\n * 
query according to the backend.\n *\n * @return The ViewChange that resulted from this synchronization.\n */\n // PORTING NOTE: Multi-tab only.\n synchronizeWithPersistedState(queryResult: QueryResult): ViewChange {\n this._syncedDocuments = queryResult.remoteKeys;\n this.limboDocuments = documentKeySet();\n const docChanges = this.computeDocChanges(queryResult.documents);\n return this.applyChanges(docChanges, /*updateLimboDocuments=*/ true);\n }\n\n /**\n * Returns a view snapshot as if this query was just listened to. Contains\n * a document add for every existing document and the `fromCache` and\n * `hasPendingWrites` status of the already established view.\n */\n // PORTING NOTE: Multi-tab only.\n computeInitialSnapshot(): ViewSnapshot {\n return ViewSnapshot.fromInitialDocuments(\n this.query,\n this.documentSet,\n this.mutatedKeys,\n this.syncState === SyncState.Local\n );\n }\n}\n\nfunction compareChangeType(c1: ChangeType, c2: ChangeType): number {\n const order = (change: ChangeType): 0 | 1 | 2 => {\n switch (change) {\n case ChangeType.Added:\n return 1;\n case ChangeType.Modified:\n return 2;\n case ChangeType.Metadata:\n // A metadata change is converted to a modified change at the public\n // api layer. Since we sort by document key and then change type,\n // metadata and modified changes must be sorted equivalently.\n return 2;\n case ChangeType.Removed:\n return 0;\n default:\n return fail('Unknown ChangeType: ' + change);\n }\n };\n\n return order(c1) - order(c2);\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Deferred } from '../util/promise';\nimport { TimerId, AsyncQueue } from '../util/async_queue';\nimport { ExponentialBackoff } from '../remote/backoff';\nimport { Transaction } from './transaction';\nimport { RemoteStore } from '../remote/remote_store';\nimport { isNullOrUndefined } from '../util/types';\nimport { isPermanentError } from '../remote/rpc_error';\nimport { FirestoreError } from '../util/error';\n\nconst RETRY_COUNT = 5;\n\n/**\n * TransactionRunner encapsulates the logic needed to run and retry transactions\n * with backoff.\n */\nexport class TransactionRunner<T> {\n private retries = RETRY_COUNT;\n private backoff: ExponentialBackoff;\n\n constructor(\n private readonly asyncQueue: AsyncQueue,\n private readonly remoteStore: RemoteStore,\n private readonly updateFunction: (transaction: Transaction) => Promise<T>,\n private readonly deferred: Deferred<T>\n ) {\n this.backoff = new ExponentialBackoff(\n this.asyncQueue,\n TimerId.TransactionRetry\n );\n }\n\n /** Runs the transaction and sets the result on deferred. 
*/\n run(): void {\n this.runWithBackOff();\n }\n\n private runWithBackOff(): void {\n this.backoff.backoffAndRun(async () => {\n const transaction = this.remoteStore.createTransaction();\n const userPromise = this.tryRunUpdateFunction(transaction);\n if (userPromise) {\n userPromise\n .then(result => {\n this.asyncQueue.enqueueAndForget(() => {\n return transaction\n .commit()\n .then(() => {\n this.deferred.resolve(result);\n })\n .catch(commitError => {\n this.handleTransactionError(commitError);\n });\n });\n })\n .catch(userPromiseError => {\n this.handleTransactionError(userPromiseError);\n });\n }\n });\n }\n\n private tryRunUpdateFunction(transaction: Transaction): Promise<T> | null {\n try {\n const userPromise = this.updateFunction(transaction);\n if (\n isNullOrUndefined(userPromise) ||\n !userPromise.catch ||\n !userPromise.then\n ) {\n this.deferred.reject(\n Error('Transaction callback must return a Promise')\n );\n return null;\n }\n return userPromise;\n } catch (error) {\n // Do not retry errors thrown by user provided updateFunction.\n this.deferred.reject(error);\n return null;\n }\n }\n\n private handleTransactionError(error: Error): void {\n if (this.retries > 0 && this.isRetryableTransactionError(error)) {\n this.retries -= 1;\n this.asyncQueue.enqueueAndForget(() => {\n this.runWithBackOff();\n return Promise.resolve();\n });\n } else {\n this.deferred.reject(error);\n }\n }\n\n private isRetryableTransactionError(error: Error): boolean {\n if (error.name === 'FirebaseError') {\n // In transactions, the backend will fail outdated reads with FAILED_PRECONDITION and\n // non-matching document versions with ABORTED. These errors should be retried.\n const code = (error as FirestoreError).code;\n return (\n code === 'aborted' ||\n code === 'failed-precondition' ||\n !isPermanentError(code)\n );\n }\n return false;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { User } from '../auth/user';\nimport {\n ignoreIfPrimaryLeaseLoss,\n LocalStore,\n MultiTabLocalStore\n} from '../local/local_store';\nimport { LocalViewChanges } from '../local/local_view_changes';\nimport { ReferenceSet } from '../local/reference_set';\nimport { TargetData, TargetPurpose } from '../local/target_data';\nimport {\n documentKeySet,\n DocumentKeySet,\n MaybeDocumentMap\n} from '../model/collections';\nimport { MaybeDocument, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { Mutation } from '../model/mutation';\nimport { BATCHID_UNKNOWN, MutationBatchResult } from '../model/mutation_batch';\nimport { RemoteEvent, TargetChange } from '../remote/remote_event';\nimport { RemoteStore } from '../remote/remote_store';\nimport { RemoteSyncer } from '../remote/remote_syncer';\nimport { debugAssert, fail, hardAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { logDebug } from '../util/log';\nimport { primitiveComparator } from 
'../util/misc';\nimport { ObjectMap } from '../util/obj_map';\nimport { Deferred } from '../util/promise';\nimport { SortedMap } from '../util/sorted_map';\n\nimport { ClientId, SharedClientState } from '../local/shared_client_state';\nimport {\n QueryTargetState,\n SharedClientStateSyncer\n} from '../local/shared_client_state_syncer';\nimport { SortedSet } from '../util/sorted_set';\nimport { ListenSequence } from './listen_sequence';\nimport { LimitType, Query } from './query';\nimport { SnapshotVersion } from './snapshot_version';\nimport { Target } from './target';\nimport { TargetIdGenerator } from './target_id_generator';\nimport { Transaction } from './transaction';\nimport {\n BatchId,\n MutationBatchState,\n OnlineState,\n OnlineStateSource,\n TargetId\n} from './types';\nimport {\n AddedLimboDocument,\n LimboDocumentChange,\n RemovedLimboDocument,\n View,\n ViewChange,\n ViewDocumentChanges\n} from './view';\nimport { ViewSnapshot } from './view_snapshot';\nimport { AsyncQueue, wrapInUserErrorIfRecoverable } from '../util/async_queue';\nimport { TransactionRunner } from './transaction_runner';\n\nconst LOG_TAG = 'SyncEngine';\n\n/**\n * QueryView contains all of the data that SyncEngine needs to keep track of for\n * a particular query.\n */\nclass QueryView {\n constructor(\n /**\n * The query itself.\n */\n public query: Query,\n /**\n * The target number created by the client that is used in the watch\n * stream to identify this query.\n */\n public targetId: TargetId,\n /**\n * The view is responsible for computing the final merged truth of what\n * docs are in the query. It gets notified of local and remote changes,\n * and applies the query filters and limits to determine the most correct\n * possible results.\n */\n public view: View\n ) {}\n}\n\n/** Tracks a limbo resolution. */\nclass LimboResolution {\n constructor(public key: DocumentKey) {}\n\n /**\n * Set to true once we've received a document. This is used in\n * getRemoteKeysForTarget() and ultimately used by WatchChangeAggregator to\n * decide whether it needs to manufacture a delete event for the target once\n * the target is CURRENT.\n */\n receivedDocument: boolean = false;\n}\n\n/**\n * Interface implemented by EventManager to handle notifications from\n * SyncEngine.\n */\nexport interface SyncEngineListener {\n /** Handles new view snapshots. */\n onWatchChange(snapshots: ViewSnapshot[]): void;\n\n /** Handles the failure of a query. */\n onWatchError(query: Query, error: Error): void;\n\n /** Handles a change in online state. */\n onOnlineStateChange(onlineState: OnlineState): void;\n}\n\n/**\n * SyncEngine is the central controller in the client SDK architecture. It is\n * the glue code between the EventManager, LocalStore, and RemoteStore. Some of\n * SyncEngine's responsibilities include:\n * 1. Coordinating client requests and remote events between the EventManager\n * and the local and remote data stores.\n * 2. Managing a View object for each query, providing the unified view between\n * the local and remote data stores.\n * 3. 
Notifying the RemoteStore when the LocalStore has new mutations in its\n * queue that need sending to the backend.\n *\n * The SyncEngine’s methods should only ever be called by methods running in the\n * global async queue.\n */\nexport class SyncEngine implements RemoteSyncer {\n protected syncEngineListener: SyncEngineListener | null = null;\n\n protected queryViewsByQuery = new ObjectMap<Query, QueryView>(q =>\n q.canonicalId()\n );\n protected queriesByTarget = new Map<TargetId, Query[]>();\n /**\n * The keys of documents that are in limbo for which we haven't yet started a\n * limbo resolution query.\n */\n private enqueuedLimboResolutions: DocumentKey[] = [];\n /**\n * Keeps track of the target ID for each document that is in limbo with an\n * active target.\n */\n protected activeLimboTargetsByKey = new SortedMap<DocumentKey, TargetId>(\n DocumentKey.comparator\n );\n /**\n * Keeps track of the information about an active limbo resolution for each\n * active target ID that was started for the purpose of limbo resolution.\n */\n protected activeLimboResolutionsByTarget = new Map<\n TargetId,\n LimboResolution\n >();\n protected limboDocumentRefs = new ReferenceSet();\n /** Stores user completion handlers, indexed by User and BatchId. */\n private mutationUserCallbacks = {} as {\n [uidKey: string]: SortedMap<BatchId, Deferred<void>>;\n };\n /** Stores user callbacks waiting for all pending writes to be acknowledged. */\n private pendingWritesCallbacks = new Map<BatchId, Array<Deferred<void>>>();\n private limboTargetIdGenerator = TargetIdGenerator.forSyncEngine();\n\n private onlineState = OnlineState.Unknown;\n\n constructor(\n protected localStore: LocalStore,\n protected remoteStore: RemoteStore,\n // PORTING NOTE: Manages state synchronization in multi-tab environments.\n protected sharedClientState: SharedClientState,\n private currentUser: User,\n private maxConcurrentLimboResolutions: number\n ) {}\n\n get isPrimaryClient(): boolean {\n return true;\n }\n\n /** Subscribes to SyncEngine notifications. Has to be called exactly once. */\n subscribe(syncEngineListener: SyncEngineListener): void {\n debugAssert(\n syncEngineListener !== null,\n 'SyncEngine listener cannot be null'\n );\n debugAssert(\n this.syncEngineListener === null,\n 'SyncEngine already has a subscriber.'\n );\n\n this.syncEngineListener = syncEngineListener;\n }\n\n /**\n * Initiates the new listen, resolves promise when listen enqueued to the\n * server. All the subsequent view snapshots or errors are sent to the\n * subscribed handlers. Returns the initial snapshot.\n */\n async listen(query: Query): Promise<ViewSnapshot> {\n this.assertSubscribed('listen()');\n\n let targetId;\n let viewSnapshot;\n\n const queryView = this.queryViewsByQuery.get(query);\n if (queryView) {\n // PORTING NOTE: With Multi-Tab Web, it is possible that a query view\n // already exists when EventManager calls us for the first time. This\n // happens when the primary tab is already listening to this query on\n // behalf of another tab and the user of the primary also starts listening\n // to the query. 
EventManager will not have an assigned target ID in this\n // case and calls `listen` to obtain this ID.\n targetId = queryView.targetId;\n this.sharedClientState.addLocalQueryTarget(targetId);\n viewSnapshot = queryView.view.computeInitialSnapshot();\n } else {\n const targetData = await this.localStore.allocateTarget(query.toTarget());\n\n const status = this.sharedClientState.addLocalQueryTarget(\n targetData.targetId\n );\n targetId = targetData.targetId;\n viewSnapshot = await this.initializeViewAndComputeSnapshot(\n query,\n targetId,\n status === 'current'\n );\n if (this.isPrimaryClient) {\n this.remoteStore.listen(targetData);\n }\n }\n\n return viewSnapshot;\n }\n\n /**\n * Registers a view for a previously unknown query and computes its initial\n * snapshot.\n */\n protected async initializeViewAndComputeSnapshot(\n query: Query,\n targetId: TargetId,\n current: boolean\n ): Promise<ViewSnapshot> {\n const queryResult = await this.localStore.executeQuery(\n query,\n /* usePreviousResults= */ true\n );\n const view = new View(query, queryResult.remoteKeys);\n const viewDocChanges = view.computeDocChanges(queryResult.documents);\n const synthesizedTargetChange = TargetChange.createSynthesizedTargetChangeForCurrentChange(\n targetId,\n current && this.onlineState !== OnlineState.Offline\n );\n const viewChange = view.applyChanges(\n viewDocChanges,\n /* updateLimboDocuments= */ this.isPrimaryClient,\n synthesizedTargetChange\n );\n this.updateTrackedLimbos(targetId, viewChange.limboChanges);\n\n debugAssert(\n !!viewChange.snapshot,\n 'applyChanges for new view should always return a snapshot'\n );\n\n const data = new QueryView(query, targetId, view);\n this.queryViewsByQuery.set(query, data);\n if (this.queriesByTarget.has(targetId)) {\n this.queriesByTarget.get(targetId)!.push(query);\n } else {\n this.queriesByTarget.set(targetId, [query]);\n }\n return viewChange.snapshot!;\n }\n\n /** Stops listening to the query. 
*/\n async unlisten(query: Query): Promise<void> {\n this.assertSubscribed('unlisten()');\n\n const queryView = this.queryViewsByQuery.get(query)!;\n debugAssert(!!queryView, 'Trying to unlisten on query not found:' + query);\n\n // Only clean up the query view and target if this is the only query mapped\n // to the target.\n const queries = this.queriesByTarget.get(queryView.targetId)!;\n if (queries.length > 1) {\n this.queriesByTarget.set(\n queryView.targetId,\n queries.filter(q => !q.isEqual(query))\n );\n this.queryViewsByQuery.delete(query);\n return;\n }\n\n // No other queries are mapped to the target, clean up the query and the target.\n if (this.isPrimaryClient) {\n // We need to remove the local query target first to allow us to verify\n // whether any other client is still interested in this target.\n this.sharedClientState.removeLocalQueryTarget(queryView.targetId);\n const targetRemainsActive = this.sharedClientState.isActiveQueryTarget(\n queryView.targetId\n );\n\n if (!targetRemainsActive) {\n await this.localStore\n .releaseTarget(queryView.targetId, /*keepPersistedTargetData=*/ false)\n .then(() => {\n this.sharedClientState.clearQueryState(queryView.targetId);\n this.remoteStore.unlisten(queryView.targetId);\n this.removeAndCleanupTarget(queryView.targetId);\n })\n .catch(ignoreIfPrimaryLeaseLoss);\n }\n } else {\n this.removeAndCleanupTarget(queryView.targetId);\n await this.localStore.releaseTarget(\n queryView.targetId,\n /*keepPersistedTargetData=*/ true\n );\n }\n }\n\n /**\n * Initiates the write of local mutation batch which involves adding the\n * writes to the mutation queue, notifying the remote store about new\n * mutations and raising events for any changes this write caused.\n *\n * The promise returned by this call is resolved when the above steps\n * have completed, *not* when the write was acked by the backend. The\n * userCallback is resolved once the write was acked/rejected by the\n * backend (or failed locally for any other reason).\n */\n async write(batch: Mutation[], userCallback: Deferred<void>): Promise<void> {\n this.assertSubscribed('write()');\n\n try {\n const result = await this.localStore.localWrite(batch);\n this.sharedClientState.addPendingMutation(result.batchId);\n this.addMutationCallback(result.batchId, userCallback);\n await this.emitNewSnapsAndNotifyLocalStore(result.changes);\n await this.remoteStore.fillWritePipeline();\n } catch (e) {\n // If we can't persist the mutation, we reject the user callback and\n // don't send the mutation. The user can then retry the write.\n const error = wrapInUserErrorIfRecoverable(e, `Failed to persist write`);\n userCallback.reject(error);\n }\n }\n\n /**\n * Takes an updateFunction in which a set of reads and writes can be performed\n * atomically. In the updateFunction, the client can read and write values\n * using the supplied transaction object. After the updateFunction, all\n * changes will be committed. If a retryable error occurs (ex: some other\n * client has changed any of the data referenced), then the updateFunction\n * will be called again after a backoff. If the updateFunction still fails\n * after all retries, then the transaction will be rejected.\n *\n * The transaction object passed to the updateFunction contains methods for\n * accessing documents and collections. Unlike other datastore access, data\n * accessed with the transaction will not reflect local changes that have not\n * been committed. 
For this reason, it is required that all reads are\n * performed before any writes. Transactions must be performed while online.\n *\n * The Deferred input is resolved when the transaction is fully committed.\n */\n runTransaction<T>(\n asyncQueue: AsyncQueue,\n updateFunction: (transaction: Transaction) => Promise<T>,\n deferred: Deferred<T>\n ): void {\n new TransactionRunner<T>(\n asyncQueue,\n this.remoteStore,\n updateFunction,\n deferred\n ).run();\n }\n\n async applyRemoteEvent(remoteEvent: RemoteEvent): Promise<void> {\n this.assertSubscribed('applyRemoteEvent()');\n try {\n const changes = await this.localStore.applyRemoteEvent(remoteEvent);\n // Update `receivedDocument` as appropriate for any limbo targets.\n remoteEvent.targetChanges.forEach((targetChange, targetId) => {\n const limboResolution = this.activeLimboResolutionsByTarget.get(\n targetId\n );\n if (limboResolution) {\n // Since this is a limbo resolution lookup, it's for a single document\n // and it could be added, modified, or removed, but not a combination.\n hardAssert(\n targetChange.addedDocuments.size +\n targetChange.modifiedDocuments.size +\n targetChange.removedDocuments.size <=\n 1,\n 'Limbo resolution for single document contains multiple changes.'\n );\n if (targetChange.addedDocuments.size > 0) {\n limboResolution.receivedDocument = true;\n } else if (targetChange.modifiedDocuments.size > 0) {\n hardAssert(\n limboResolution.receivedDocument,\n 'Received change for limbo target document without add.'\n );\n } else if (targetChange.removedDocuments.size > 0) {\n hardAssert(\n limboResolution.receivedDocument,\n 'Received remove for limbo target document without add.'\n );\n limboResolution.receivedDocument = false;\n } else {\n // This was probably just a CURRENT targetChange or similar.\n }\n }\n });\n await this.emitNewSnapsAndNotifyLocalStore(changes, remoteEvent);\n } catch (error) {\n await ignoreIfPrimaryLeaseLoss(error);\n }\n }\n\n /**\n * Applies an OnlineState change to the sync engine and notifies any views of\n * the change.\n */\n applyOnlineStateChange(\n onlineState: OnlineState,\n source: OnlineStateSource\n ): void {\n this.assertSubscribed('applyOnlineStateChange()');\n const newViewSnapshots = [] as ViewSnapshot[];\n this.queryViewsByQuery.forEach((query, queryView) => {\n const viewChange = queryView.view.applyOnlineStateChange(onlineState);\n debugAssert(\n viewChange.limboChanges.length === 0,\n 'OnlineState should not affect limbo documents.'\n );\n if (viewChange.snapshot) {\n newViewSnapshots.push(viewChange.snapshot);\n }\n });\n this.syncEngineListener!.onOnlineStateChange(onlineState);\n this.syncEngineListener!.onWatchChange(newViewSnapshots);\n this.onlineState = onlineState;\n }\n\n async rejectListen(targetId: TargetId, err: FirestoreError): Promise<void> {\n this.assertSubscribed('rejectListens()');\n\n // PORTING NOTE: Multi-tab only.\n this.sharedClientState.updateQueryState(targetId, 'rejected', err);\n\n const limboResolution = this.activeLimboResolutionsByTarget.get(targetId);\n const limboKey = limboResolution && limboResolution.key;\n if (limboKey) {\n // TODO(klimt): We really only should do the following on permission\n // denied errors, but we don't have the cause code here.\n\n // It's a limbo doc. Create a synthetic event saying it was deleted.\n // This is kind of a hack. Ideally, we would have a method in the local\n // store to purge a document. 
However, it would be tricky to keep all of\n // the local store's invariants with another method.\n let documentUpdates = new SortedMap<DocumentKey, MaybeDocument>(\n DocumentKey.comparator\n );\n documentUpdates = documentUpdates.insert(\n limboKey,\n new NoDocument(limboKey, SnapshotVersion.min())\n );\n const resolvedLimboDocuments = documentKeySet().add(limboKey);\n const event = new RemoteEvent(\n SnapshotVersion.min(),\n /* targetChanges= */ new Map<TargetId, TargetChange>(),\n /* targetMismatches= */ new SortedSet<TargetId>(primitiveComparator),\n documentUpdates,\n resolvedLimboDocuments\n );\n\n await this.applyRemoteEvent(event);\n\n // Since this query failed, we won't want to manually unlisten to it.\n // We only remove it from bookkeeping after we successfully applied the\n // RemoteEvent. If `applyRemoteEvent()` throws, we want to re-listen to\n // this query when the RemoteStore restarts the Watch stream, which should\n // re-trigger the target failure.\n this.activeLimboTargetsByKey = this.activeLimboTargetsByKey.remove(\n limboKey\n );\n this.activeLimboResolutionsByTarget.delete(targetId);\n this.pumpEnqueuedLimboResolutions();\n } else {\n await this.localStore\n .releaseTarget(targetId, /* keepPersistedTargetData */ false)\n .then(() => this.removeAndCleanupTarget(targetId, err))\n .catch(ignoreIfPrimaryLeaseLoss);\n }\n }\n\n async applySuccessfulWrite(\n mutationBatchResult: MutationBatchResult\n ): Promise<void> {\n this.assertSubscribed('applySuccessfulWrite()');\n\n const batchId = mutationBatchResult.batch.batchId;\n\n // The local store may or may not be able to apply the write result and\n // raise events immediately (depending on whether the watcher is caught\n // up), so we raise user callbacks first so that they consistently happen\n // before listen events.\n this.processUserCallback(batchId, /*error=*/ null);\n\n this.triggerPendingWritesCallbacks(batchId);\n\n try {\n const changes = await this.localStore.acknowledgeBatch(\n mutationBatchResult\n );\n this.sharedClientState.updateMutationState(batchId, 'acknowledged');\n await this.emitNewSnapsAndNotifyLocalStore(changes);\n } catch (error) {\n await ignoreIfPrimaryLeaseLoss(error);\n }\n }\n\n async rejectFailedWrite(\n batchId: BatchId,\n error: FirestoreError\n ): Promise<void> {\n this.assertSubscribed('rejectFailedWrite()');\n\n // The local store may or may not be able to apply the write result and\n // raise events immediately (depending on whether the watcher is caught up),\n // so we raise user callbacks first so that they consistently happen before\n // listen events.\n this.processUserCallback(batchId, error);\n\n this.triggerPendingWritesCallbacks(batchId);\n\n try {\n const changes = await this.localStore.rejectBatch(batchId);\n this.sharedClientState.updateMutationState(batchId, 'rejected', error);\n await this.emitNewSnapsAndNotifyLocalStore(changes);\n } catch (error) {\n await ignoreIfPrimaryLeaseLoss(error);\n }\n }\n\n /**\n * Registers a user callback that resolves when all pending mutations at the moment of calling\n * are acknowledged .\n */\n async registerPendingWritesCallback(callback: Deferred<void>): Promise<void> {\n if (!this.remoteStore.canUseNetwork()) {\n logDebug(\n LOG_TAG,\n 'The network is disabled. 
The task returned by ' +\n \"'awaitPendingWrites()' will not complete until the network is enabled.\"\n );\n }\n\n try {\n const highestBatchId = await this.localStore.getHighestUnacknowledgedBatchId();\n if (highestBatchId === BATCHID_UNKNOWN) {\n // Trigger the callback right away if there are no pending writes at the moment.\n callback.resolve();\n return;\n }\n\n const callbacks = this.pendingWritesCallbacks.get(highestBatchId) || [];\n callbacks.push(callback);\n this.pendingWritesCallbacks.set(highestBatchId, callbacks);\n } catch (e) {\n const firestoreError = wrapInUserErrorIfRecoverable(\n e,\n 'Initialization of waitForPendingWrites() operation failed'\n );\n callback.reject(firestoreError);\n }\n }\n\n /**\n * Triggers the callbacks that are waiting for this batch id to get acknowledged by the server,\n * if there are any.\n */\n private triggerPendingWritesCallbacks(batchId: BatchId): void {\n (this.pendingWritesCallbacks.get(batchId) || []).forEach(callback => {\n callback.resolve();\n });\n\n this.pendingWritesCallbacks.delete(batchId);\n }\n\n /** Reject all outstanding callbacks waiting for pending writes to complete. */\n private rejectOutstandingPendingWritesCallbacks(errorMessage: string): void {\n this.pendingWritesCallbacks.forEach(callbacks => {\n callbacks.forEach(callback => {\n callback.reject(new FirestoreError(Code.CANCELLED, errorMessage));\n });\n });\n\n this.pendingWritesCallbacks.clear();\n }\n\n private addMutationCallback(\n batchId: BatchId,\n callback: Deferred<void>\n ): void {\n let newCallbacks = this.mutationUserCallbacks[this.currentUser.toKey()];\n if (!newCallbacks) {\n newCallbacks = new SortedMap<BatchId, Deferred<void>>(\n primitiveComparator\n );\n }\n newCallbacks = newCallbacks.insert(batchId, callback);\n this.mutationUserCallbacks[this.currentUser.toKey()] = newCallbacks;\n }\n\n /**\n * Resolves or rejects the user callback for the given batch and then discards\n * it.\n */\n protected processUserCallback(batchId: BatchId, error: Error | null): void {\n let newCallbacks = this.mutationUserCallbacks[this.currentUser.toKey()];\n\n // NOTE: Mutations restored from persistence won't have callbacks, so it's\n // okay for there to be no callback for this ID.\n if (newCallbacks) {\n const callback = newCallbacks.get(batchId);\n if (callback) {\n debugAssert(\n batchId === newCallbacks.minKey(),\n 'Mutation callbacks processed out-of-order?'\n );\n if (error) {\n callback.reject(error);\n } else {\n callback.resolve();\n }\n newCallbacks = newCallbacks.remove(batchId);\n }\n this.mutationUserCallbacks[this.currentUser.toKey()] = newCallbacks;\n }\n }\n\n protected removeAndCleanupTarget(\n targetId: number,\n error: Error | null = null\n ): void {\n this.sharedClientState.removeLocalQueryTarget(targetId);\n\n debugAssert(\n this.queriesByTarget.has(targetId) &&\n this.queriesByTarget.get(targetId)!.length !== 0,\n `There are no queries mapped to target id ${targetId}`\n );\n\n for (const query of this.queriesByTarget.get(targetId)!) 
{\n this.queryViewsByQuery.delete(query);\n if (error) {\n this.syncEngineListener!.onWatchError(query, error);\n }\n }\n\n this.queriesByTarget.delete(targetId);\n\n if (this.isPrimaryClient) {\n const limboKeys = this.limboDocumentRefs.removeReferencesForId(targetId);\n limboKeys.forEach(limboKey => {\n const isReferenced = this.limboDocumentRefs.containsKey(limboKey);\n if (!isReferenced) {\n // We removed the last reference for this key\n this.removeLimboTarget(limboKey);\n }\n });\n }\n }\n\n private removeLimboTarget(key: DocumentKey): void {\n // It's possible that the target already got removed because the query failed. In that case,\n // the key won't exist in `limboTargetsByKey`. Only do the cleanup if we still have the target.\n const limboTargetId = this.activeLimboTargetsByKey.get(key);\n if (limboTargetId === null) {\n // This target already got removed, because the query failed.\n return;\n }\n\n this.remoteStore.unlisten(limboTargetId);\n this.activeLimboTargetsByKey = this.activeLimboTargetsByKey.remove(key);\n this.activeLimboResolutionsByTarget.delete(limboTargetId);\n this.pumpEnqueuedLimboResolutions();\n }\n\n protected updateTrackedLimbos(\n targetId: TargetId,\n limboChanges: LimboDocumentChange[]\n ): void {\n for (const limboChange of limboChanges) {\n if (limboChange instanceof AddedLimboDocument) {\n this.limboDocumentRefs.addReference(limboChange.key, targetId);\n this.trackLimboChange(limboChange);\n } else if (limboChange instanceof RemovedLimboDocument) {\n logDebug(LOG_TAG, 'Document no longer in limbo: ' + limboChange.key);\n this.limboDocumentRefs.removeReference(limboChange.key, targetId);\n const isReferenced = this.limboDocumentRefs.containsKey(\n limboChange.key\n );\n if (!isReferenced) {\n // We removed the last reference for this key\n this.removeLimboTarget(limboChange.key);\n }\n } else {\n fail('Unknown limbo change: ' + JSON.stringify(limboChange));\n }\n }\n }\n\n private trackLimboChange(limboChange: AddedLimboDocument): void {\n const key = limboChange.key;\n if (!this.activeLimboTargetsByKey.get(key)) {\n logDebug(LOG_TAG, 'New document in limbo: ' + key);\n this.enqueuedLimboResolutions.push(key);\n this.pumpEnqueuedLimboResolutions();\n }\n }\n\n /**\n * Starts listens for documents in limbo that are enqueued for resolution,\n * subject to a maximum number of concurrent resolutions.\n *\n * Without bounding the number of concurrent resolutions, the server can fail\n * with \"resource exhausted\" errors which can lead to pathological client\n * behavior as seen in https://github.com/firebase/firebase-js-sdk/issues/2683.\n */\n private pumpEnqueuedLimboResolutions(): void {\n while (\n this.enqueuedLimboResolutions.length > 0 &&\n this.activeLimboTargetsByKey.size < this.maxConcurrentLimboResolutions\n ) {\n const key = this.enqueuedLimboResolutions.shift()!;\n const limboTargetId = this.limboTargetIdGenerator.next();\n this.activeLimboResolutionsByTarget.set(\n limboTargetId,\n new LimboResolution(key)\n );\n this.activeLimboTargetsByKey = this.activeLimboTargetsByKey.insert(\n key,\n limboTargetId\n );\n this.remoteStore.listen(\n new TargetData(\n Query.atPath(key.path).toTarget(),\n limboTargetId,\n TargetPurpose.LimboResolution,\n ListenSequence.INVALID\n )\n );\n }\n }\n\n // Visible for testing\n activeLimboDocumentResolutions(): SortedMap<DocumentKey, TargetId> {\n return this.activeLimboTargetsByKey;\n }\n\n // Visible for testing\n enqueuedLimboDocumentResolutions(): DocumentKey[] {\n return this.enqueuedLimboResolutions;\n }\n\n 
protected async emitNewSnapsAndNotifyLocalStore(\n changes: MaybeDocumentMap,\n remoteEvent?: RemoteEvent\n ): Promise<void> {\n const newSnaps: ViewSnapshot[] = [];\n const docChangesInAllViews: LocalViewChanges[] = [];\n const queriesProcessed: Array<Promise<void>> = [];\n\n this.queryViewsByQuery.forEach((_, queryView) => {\n queriesProcessed.push(\n Promise.resolve()\n .then(() => {\n const viewDocChanges = queryView.view.computeDocChanges(changes);\n if (!viewDocChanges.needsRefill) {\n return viewDocChanges;\n }\n // The query has a limit and some docs were removed, so we need\n // to re-run the query against the local store to make sure we\n // didn't lose any good docs that had been past the limit.\n return this.localStore\n .executeQuery(queryView.query, /* usePreviousResults= */ false)\n .then(({ documents }) => {\n return queryView.view.computeDocChanges(\n documents,\n viewDocChanges\n );\n });\n })\n .then((viewDocChanges: ViewDocumentChanges) => {\n const targetChange =\n remoteEvent && remoteEvent.targetChanges.get(queryView.targetId);\n const viewChange = queryView.view.applyChanges(\n viewDocChanges,\n /* updateLimboDocuments= */ this.isPrimaryClient,\n targetChange\n );\n this.updateTrackedLimbos(\n queryView.targetId,\n viewChange.limboChanges\n );\n if (viewChange.snapshot) {\n if (this.isPrimaryClient) {\n this.sharedClientState.updateQueryState(\n queryView.targetId,\n viewChange.snapshot.fromCache ? 'not-current' : 'current'\n );\n }\n\n newSnaps.push(viewChange.snapshot);\n const docChanges = LocalViewChanges.fromSnapshot(\n queryView.targetId,\n viewChange.snapshot\n );\n docChangesInAllViews.push(docChanges);\n }\n })\n );\n });\n\n await Promise.all(queriesProcessed);\n this.syncEngineListener!.onWatchChange(newSnaps);\n await this.localStore.notifyLocalViewChanges(docChangesInAllViews);\n }\n\n protected assertSubscribed(fnName: string): void {\n debugAssert(\n this.syncEngineListener !== null,\n 'Trying to call ' + fnName + ' before calling subscribe().'\n );\n }\n\n async handleCredentialChange(user: User): Promise<void> {\n const userChanged = !this.currentUser.isEqual(user);\n\n if (userChanged) {\n const result = await this.localStore.handleUserChange(user);\n this.currentUser = user;\n\n // Fails tasks waiting for pending writes requested by previous user.\n this.rejectOutstandingPendingWritesCallbacks(\n \"'waitForPendingWrites' promise is rejected due to a user change.\"\n );\n // TODO(b/114226417): Consider calling this only in the primary tab.\n this.sharedClientState.handleUserChange(\n user,\n result.removedBatchIds,\n result.addedBatchIds\n );\n await this.emitNewSnapsAndNotifyLocalStore(result.affectedDocuments);\n }\n\n await this.remoteStore.handleCredentialChange();\n }\n\n enableNetwork(): Promise<void> {\n return this.remoteStore.enableNetwork();\n }\n\n disableNetwork(): Promise<void> {\n return this.remoteStore.disableNetwork();\n }\n\n getRemoteKeysForTarget(targetId: TargetId): DocumentKeySet {\n const limboResolution = this.activeLimboResolutionsByTarget.get(targetId);\n if (limboResolution && limboResolution.receivedDocument) {\n return documentKeySet().add(limboResolution.key);\n } else {\n let keySet = documentKeySet();\n const queries = this.queriesByTarget.get(targetId);\n if (!queries) {\n return keySet;\n }\n for (const query of queries) {\n const queryView = this.queryViewsByQuery.get(query);\n debugAssert(!!queryView, `No query view found for ${query}`);\n keySet = keySet.unionWith(queryView.view.syncedDocuments);\n }\n return 
keySet;\n }\n }\n}\n\n/**\n * An implementation of SyncEngine that implements SharedClientStateSyncer for\n * Multi-Tab synchronization.\n */\n// PORTING NOTE: Web only\nexport class MultiTabSyncEngine extends SyncEngine\n implements SharedClientStateSyncer {\n // The primary state is set to `true` or `false` immediately after Firestore\n // startup. In the interim, a client should only be considered primary if\n // `isPrimary` is true.\n private _isPrimaryClient: undefined | boolean = undefined;\n\n constructor(\n protected localStore: MultiTabLocalStore,\n remoteStore: RemoteStore,\n sharedClientState: SharedClientState,\n currentUser: User,\n maxConcurrentLimboResolutions: number\n ) {\n super(\n localStore,\n remoteStore,\n sharedClientState,\n currentUser,\n maxConcurrentLimboResolutions\n );\n }\n\n get isPrimaryClient(): boolean {\n return this._isPrimaryClient === true;\n }\n\n enableNetwork(): Promise<void> {\n this.localStore.setNetworkEnabled(true);\n return super.enableNetwork();\n }\n\n disableNetwork(): Promise<void> {\n this.localStore.setNetworkEnabled(false);\n return super.disableNetwork();\n }\n\n /**\n * Reconcile the list of synced documents in an existing view with those\n * from persistence.\n */\n private async synchronizeViewAndComputeSnapshot(\n queryView: QueryView\n ): Promise<ViewChange> {\n const queryResult = await this.localStore.executeQuery(\n queryView.query,\n /* usePreviousResults= */ true\n );\n const viewSnapshot = queryView.view.synchronizeWithPersistedState(\n queryResult\n );\n if (this._isPrimaryClient) {\n this.updateTrackedLimbos(queryView.targetId, viewSnapshot.limboChanges);\n }\n return viewSnapshot;\n }\n\n applyOnlineStateChange(\n onlineState: OnlineState,\n source: OnlineStateSource\n ): void {\n // If we are the primary client, the online state of all clients only\n // depends on the online state of the local RemoteStore.\n if (this.isPrimaryClient && source === OnlineStateSource.RemoteStore) {\n super.applyOnlineStateChange(onlineState, source);\n this.sharedClientState.setOnlineState(onlineState);\n }\n\n // If we are the secondary client, we explicitly ignore the remote store's\n // online state (the local client may go offline, even though the primary\n // tab remains online) and only apply the primary tab's online state from\n // SharedClientState.\n if (\n !this.isPrimaryClient &&\n source === OnlineStateSource.SharedClientState\n ) {\n super.applyOnlineStateChange(onlineState, source);\n }\n }\n\n async applyBatchState(\n batchId: BatchId,\n batchState: MutationBatchState,\n error?: FirestoreError\n ): Promise<void> {\n this.assertSubscribed('applyBatchState()');\n const documents = await this.localStore.lookupMutationDocuments(batchId);\n\n if (documents === null) {\n // A throttled tab may not have seen the mutation before it was completed\n // and removed from the mutation queue, in which case we won't have cached\n // the affected documents. In this case we can safely ignore the update\n // since that means we didn't apply the mutation locally at all (if we\n // had, we would have cached the affected documents), and so we will just\n // see any resulting document changes via normal remote document updates\n // as applicable.\n logDebug(LOG_TAG, 'Cannot apply mutation batch with id: ' + batchId);\n return;\n }\n\n if (batchState === 'pending') {\n // If we are the primary client, we need to send this write to the\n // backend. 
Secondary clients will ignore these writes since their remote\n // connection is disabled.\n await this.remoteStore.fillWritePipeline();\n } else if (batchState === 'acknowledged' || batchState === 'rejected') {\n // NOTE: Both these methods are no-ops for batches that originated from\n // other clients.\n this.processUserCallback(batchId, error ? error : null);\n this.localStore.removeCachedMutationBatchMetadata(batchId);\n } else {\n fail(`Unknown batchState: ${batchState}`);\n }\n\n await this.emitNewSnapsAndNotifyLocalStore(documents);\n }\n\n async applyPrimaryState(isPrimary: boolean): Promise<void> {\n if (isPrimary === true && this._isPrimaryClient !== true) {\n // Secondary tabs only maintain Views for their local listeners and the\n // Views internal state may not be 100% populated (in particular\n // secondary tabs don't track syncedDocuments, the set of documents the\n // server considers to be in the target). So when a secondary becomes\n // primary, we need to make sure that all views for all targets\n // match the state on disk.\n const activeTargets = this.sharedClientState.getAllActiveQueryTargets();\n const activeQueries = await this.synchronizeQueryViewsAndRaiseSnapshots(\n activeTargets.toArray(),\n /*transitionToPrimary=*/ true\n );\n this._isPrimaryClient = true;\n await this.remoteStore.applyPrimaryState(true);\n for (const targetData of activeQueries) {\n this.remoteStore.listen(targetData);\n }\n } else if (isPrimary === false && this._isPrimaryClient !== false) {\n const activeTargets: TargetId[] = [];\n\n let p = Promise.resolve();\n this.queriesByTarget.forEach((_, targetId) => {\n if (this.sharedClientState.isLocalQueryTarget(targetId)) {\n activeTargets.push(targetId);\n } else {\n p = p.then(() => {\n this.removeAndCleanupTarget(targetId);\n return this.localStore.releaseTarget(\n targetId,\n /*keepPersistedTargetData=*/ true\n );\n });\n }\n this.remoteStore.unlisten(targetId);\n });\n await p;\n\n await this.synchronizeQueryViewsAndRaiseSnapshots(\n activeTargets,\n /*transitionToPrimary=*/ false\n );\n this.resetLimboDocuments();\n this._isPrimaryClient = false;\n await this.remoteStore.applyPrimaryState(false);\n }\n }\n\n private resetLimboDocuments(): void {\n this.activeLimboResolutionsByTarget.forEach((_, targetId) => {\n this.remoteStore.unlisten(targetId);\n });\n this.limboDocumentRefs.removeAllReferences();\n this.activeLimboResolutionsByTarget = new Map<TargetId, LimboResolution>();\n this.activeLimboTargetsByKey = new SortedMap<DocumentKey, TargetId>(\n DocumentKey.comparator\n );\n }\n\n /**\n * Reconcile the query views of the provided query targets with the state from\n * persistence. 
Raises snapshots for any changes that affect the local\n * client and returns the updated state of all target's query data.\n *\n * @param targets the list of targets with views that need to be recomputed\n * @param transitionToPrimary `true` iff the tab transitions from a secondary\n * tab to a primary tab\n */\n private async synchronizeQueryViewsAndRaiseSnapshots(\n targets: TargetId[],\n transitionToPrimary: boolean\n ): Promise<TargetData[]> {\n const activeQueries: TargetData[] = [];\n const newViewSnapshots: ViewSnapshot[] = [];\n for (const targetId of targets) {\n let targetData: TargetData;\n const queries = this.queriesByTarget.get(targetId);\n\n if (queries && queries.length !== 0) {\n // For queries that have a local View, we need to update their state\n // in LocalStore (as the resume token and the snapshot version\n // might have changed) and reconcile their views with the persisted\n // state (the list of syncedDocuments may have gotten out of sync).\n await this.localStore.releaseTarget(\n targetId,\n /*keepPersistedTargetData=*/ true\n );\n targetData = await this.localStore.allocateTarget(\n queries[0].toTarget()\n );\n\n for (const query of queries) {\n const queryView = this.queryViewsByQuery.get(query);\n debugAssert(!!queryView, `No query view found for ${query}`);\n\n const viewChange = await this.synchronizeViewAndComputeSnapshot(\n queryView\n );\n if (viewChange.snapshot) {\n newViewSnapshots.push(viewChange.snapshot);\n }\n }\n } else {\n debugAssert(\n transitionToPrimary,\n 'A secondary tab should never have an active target without an active query.'\n );\n // For queries that never executed on this client, we need to\n // allocate the target in LocalStore and initialize a new View.\n const target = await this.localStore.getTarget(targetId);\n debugAssert(!!target, `Target for id ${targetId} not found`);\n targetData = await this.localStore.allocateTarget(target);\n await this.initializeViewAndComputeSnapshot(\n this.synthesizeTargetToQuery(target!),\n targetId,\n /*current=*/ false\n );\n }\n\n activeQueries.push(targetData!);\n }\n\n this.syncEngineListener!.onWatchChange(newViewSnapshots);\n return activeQueries;\n }\n\n /**\n * Creates a `Query` object from the specified `Target`. 
There is no way to\n * obtain the original `Query`, so we synthesize a `Query` from the `Target`\n * object.\n *\n * The synthesized result might be different from the original `Query`, but\n * since the synthesized `Query` should return the same results as the\n * original one (only the presentation of results might differ), the potential\n * difference will not cause issues.\n */\n private synthesizeTargetToQuery(target: Target): Query {\n return new Query(\n target.path,\n target.collectionGroup,\n target.orderBy,\n target.filters,\n target.limit,\n LimitType.First,\n target.startAt,\n target.endAt\n );\n }\n\n getActiveClients(): Promise<ClientId[]> {\n return this.localStore.getActiveClients();\n }\n\n async applyTargetState(\n targetId: TargetId,\n state: QueryTargetState,\n error?: FirestoreError\n ): Promise<void> {\n if (this._isPrimaryClient) {\n // If we receive a target state notification via WebStorage, we are\n // either already secondary or another tab has taken the primary lease.\n logDebug(LOG_TAG, 'Ignoring unexpected query state notification.');\n return;\n }\n\n if (this.queriesByTarget.has(targetId)) {\n switch (state) {\n case 'current':\n case 'not-current': {\n const changes = await this.localStore.getNewDocumentChanges();\n const synthesizedRemoteEvent = RemoteEvent.createSynthesizedRemoteEventForCurrentChange(\n targetId,\n state === 'current'\n );\n await this.emitNewSnapsAndNotifyLocalStore(\n changes,\n synthesizedRemoteEvent\n );\n break;\n }\n case 'rejected': {\n await this.localStore.releaseTarget(\n targetId,\n /* keepPersistedTargetData */ true\n );\n this.removeAndCleanupTarget(targetId, error);\n break;\n }\n default:\n fail('Unexpected target state: ' + state);\n }\n }\n }\n\n async applyActiveTargetsChange(\n added: TargetId[],\n removed: TargetId[]\n ): Promise<void> {\n if (!this._isPrimaryClient) {\n return;\n }\n\n for (const targetId of added) {\n if (this.queriesByTarget.has(targetId)) {\n // A target might have been added in a previous attempt\n logDebug(LOG_TAG, 'Adding an already active target ' + targetId);\n continue;\n }\n\n const target = await this.localStore.getTarget(targetId);\n debugAssert(\n !!target,\n `Query data for active target ${targetId} not found`\n );\n const targetData = await this.localStore.allocateTarget(target);\n await this.initializeViewAndComputeSnapshot(\n this.synthesizeTargetToQuery(target),\n targetData.targetId,\n /*current=*/ false\n );\n this.remoteStore.listen(targetData);\n }\n\n for (const targetId of removed) {\n // Check that the target is still active since the target might have been\n // removed if it has been rejected by the backend.\n if (!this.queriesByTarget.has(targetId)) {\n continue;\n }\n\n // Release queries that are still active.\n await this.localStore\n .releaseTarget(targetId, /* keepPersistedTargetData */ false)\n .then(() => {\n this.remoteStore.unlisten(targetId);\n this.removeAndCleanupTarget(targetId);\n })\n .catch(ignoreIfPrimaryLeaseLoss);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the 
specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert } from '../util/assert';\nimport { EventHandler } from '../util/misc';\nimport { ObjectMap } from '../util/obj_map';\nimport { Query } from './query';\nimport { SyncEngine, SyncEngineListener } from './sync_engine';\nimport { OnlineState } from './types';\nimport { ChangeType, DocumentViewChange, ViewSnapshot } from './view_snapshot';\nimport { wrapInUserErrorIfRecoverable } from '../util/async_queue';\n\n/**\n * Holds the listeners and the last received ViewSnapshot for a query being\n * tracked by EventManager.\n */\nclass QueryListenersInfo {\n viewSnap: ViewSnapshot | undefined = undefined;\n listeners: QueryListener[] = [];\n}\n\n/**\n * Interface for handling events from the EventManager.\n */\nexport interface Observer<T> {\n next: EventHandler<T>;\n error: EventHandler<Error>;\n}\n\n/**\n * EventManager is responsible for mapping queries to query event emitters.\n * It handles \"fan-out\". -- Identical queries will re-use the same watch on the\n * backend.\n */\nexport class EventManager implements SyncEngineListener {\n private queries = new ObjectMap<Query, QueryListenersInfo>(q =>\n q.canonicalId()\n );\n\n private onlineState = OnlineState.Unknown;\n\n private snapshotsInSyncListeners: Set<Observer<void>> = new Set();\n\n constructor(private syncEngine: SyncEngine) {\n this.syncEngine.subscribe(this);\n }\n\n async listen(listener: QueryListener): Promise<void> {\n const query = listener.query;\n let firstListen = false;\n\n let queryInfo = this.queries.get(query);\n if (!queryInfo) {\n firstListen = true;\n queryInfo = new QueryListenersInfo();\n }\n\n if (firstListen) {\n try {\n queryInfo.viewSnap = await this.syncEngine.listen(query);\n } catch (e) {\n const firestoreError = wrapInUserErrorIfRecoverable(\n e,\n `Initialization of query '${listener.query}' failed`\n );\n listener.onError(firestoreError);\n return;\n }\n }\n\n this.queries.set(query, queryInfo);\n queryInfo.listeners.push(listener);\n\n // Run global snapshot listeners if a consistent snapshot has been emitted.\n const raisedEvent = listener.applyOnlineStateChange(this.onlineState);\n debugAssert(\n !raisedEvent,\n \"applyOnlineStateChange() shouldn't raise an event for brand-new listeners.\"\n );\n\n if (queryInfo.viewSnap) {\n const raisedEvent = listener.onViewSnapshot(queryInfo.viewSnap);\n if (raisedEvent) {\n this.raiseSnapshotsInSyncEvent();\n }\n }\n }\n\n async unlisten(listener: QueryListener): Promise<void> {\n const query = listener.query;\n let lastListen = false;\n\n const queryInfo = this.queries.get(query);\n if (queryInfo) {\n const i = queryInfo.listeners.indexOf(listener);\n if (i >= 0) {\n queryInfo.listeners.splice(i, 1);\n lastListen = queryInfo.listeners.length === 0;\n }\n }\n\n if (lastListen) {\n this.queries.delete(query);\n return this.syncEngine.unlisten(query);\n }\n }\n\n onWatchChange(viewSnaps: ViewSnapshot[]): void {\n let raisedEvent = false;\n for (const viewSnap of viewSnaps) {\n const query = viewSnap.query;\n const queryInfo = this.queries.get(query);\n if (queryInfo) {\n for (const listener of queryInfo.listeners) {\n if (listener.onViewSnapshot(viewSnap)) {\n raisedEvent = true;\n }\n }\n queryInfo.viewSnap = viewSnap;\n }\n }\n if (raisedEvent) {\n this.raiseSnapshotsInSyncEvent();\n }\n }\n\n onWatchError(query: Query, error: Error): void {\n const queryInfo = this.queries.get(query);\n if (queryInfo) {\n for (const listener of queryInfo.listeners) {\n 
listener.onError(error);\n }\n }\n\n // Remove all listeners. NOTE: We don't need to call syncEngine.unlisten()\n // after an error.\n this.queries.delete(query);\n }\n\n onOnlineStateChange(onlineState: OnlineState): void {\n this.onlineState = onlineState;\n let raisedEvent = false;\n this.queries.forEach((_, queryInfo) => {\n for (const listener of queryInfo.listeners) {\n // Run global snapshot listeners if a consistent snapshot has been emitted.\n if (listener.applyOnlineStateChange(onlineState)) {\n raisedEvent = true;\n }\n }\n });\n if (raisedEvent) {\n this.raiseSnapshotsInSyncEvent();\n }\n }\n\n addSnapshotsInSyncListener(observer: Observer<void>): void {\n this.snapshotsInSyncListeners.add(observer);\n // Immediately fire an initial event, indicating all existing listeners\n // are in-sync.\n observer.next();\n }\n\n removeSnapshotsInSyncListener(observer: Observer<void>): void {\n this.snapshotsInSyncListeners.delete(observer);\n }\n\n // Call all global snapshot listeners that have been set.\n private raiseSnapshotsInSyncEvent(): void {\n this.snapshotsInSyncListeners.forEach(observer => {\n observer.next();\n });\n }\n}\n\nexport interface ListenOptions {\n /** Raise events even when only the metadata changes */\n readonly includeMetadataChanges?: boolean;\n\n /**\n * Wait for a sync with the server when online, but still raise events while\n * offline.\n */\n readonly waitForSyncWhenOnline?: boolean;\n}\n\n/**\n * QueryListener takes a series of internal view snapshots and determines\n * when to raise the event.\n *\n * It uses an Observer to dispatch events.\n */\nexport class QueryListener {\n /**\n * Initial snapshots (e.g. from cache) may not be propagated to the wrapped\n * observer. This flag is set to true once we've actually raised an event.\n */\n private raisedInitialEvent = false;\n\n private options: ListenOptions;\n\n private snap: ViewSnapshot | null = null;\n\n private onlineState = OnlineState.Unknown;\n\n constructor(\n readonly query: Query,\n private queryObserver: Observer<ViewSnapshot>,\n options?: ListenOptions\n ) {\n this.options = options || {};\n }\n\n /**\n * Applies the new ViewSnapshot to this listener, raising a user-facing event\n * if applicable (depending on what changed, whether the user has opted into\n * metadata-only changes, etc.). Returns true if a user-facing event was\n * indeed raised.\n */\n onViewSnapshot(snap: ViewSnapshot): boolean {\n debugAssert(\n snap.docChanges.length > 0 || snap.syncStateChanged,\n 'We got a new snapshot with no changes?'\n );\n\n if (!this.options.includeMetadataChanges) {\n // Remove the metadata only changes.\n const docChanges: DocumentViewChange[] = [];\n for (const docChange of snap.docChanges) {\n if (docChange.type !== ChangeType.Metadata) {\n docChanges.push(docChange);\n }\n }\n snap = new ViewSnapshot(\n snap.query,\n snap.docs,\n snap.oldDocs,\n docChanges,\n snap.mutatedKeys,\n snap.fromCache,\n snap.syncStateChanged,\n /* excludesMetadataChanges= */ true\n );\n }\n let raisedEvent = false;\n if (!this.raisedInitialEvent) {\n if (this.shouldRaiseInitialEvent(snap, this.onlineState)) {\n this.raiseInitialEvent(snap);\n raisedEvent = true;\n }\n } else if (this.shouldRaiseEvent(snap)) {\n this.queryObserver.next(snap);\n raisedEvent = true;\n }\n\n this.snap = snap;\n return raisedEvent;\n }\n\n onError(error: Error): void {\n this.queryObserver.error(error);\n }\n\n /** Returns whether a snapshot was raised. 
*/\n applyOnlineStateChange(onlineState: OnlineState): boolean {\n this.onlineState = onlineState;\n let raisedEvent = false;\n if (\n this.snap &&\n !this.raisedInitialEvent &&\n this.shouldRaiseInitialEvent(this.snap, onlineState)\n ) {\n this.raiseInitialEvent(this.snap);\n raisedEvent = true;\n }\n return raisedEvent;\n }\n\n private shouldRaiseInitialEvent(\n snap: ViewSnapshot,\n onlineState: OnlineState\n ): boolean {\n debugAssert(\n !this.raisedInitialEvent,\n 'Determining whether to raise first event but already had first event'\n );\n\n // Always raise the first event when we're synced\n if (!snap.fromCache) {\n return true;\n }\n\n // NOTE: We consider OnlineState.Unknown as online (it should become Offline\n // or Online if we wait long enough).\n const maybeOnline = onlineState !== OnlineState.Offline;\n // Don't raise the event if we're online, aren't synced yet (checked\n // above) and are waiting for a sync.\n if (this.options.waitForSyncWhenOnline && maybeOnline) {\n debugAssert(\n snap.fromCache,\n 'Waiting for sync, but snapshot is not from cache'\n );\n return false;\n }\n\n // Raise data from cache if we have any documents or we are offline\n return !snap.docs.isEmpty() || onlineState === OnlineState.Offline;\n }\n\n private shouldRaiseEvent(snap: ViewSnapshot): boolean {\n // We don't need to handle includeDocumentMetadataChanges here because\n // the Metadata only changes have already been stripped out if needed.\n // At this point the only changes we will see are the ones we should\n // propagate.\n if (snap.docChanges.length > 0) {\n return true;\n }\n\n const hasPendingWritesChanged =\n this.snap && this.snap.hasPendingWrites !== snap.hasPendingWrites;\n if (snap.syncStateChanged || hasPendingWritesChanged) {\n return this.options.includeMetadataChanges === true;\n }\n\n // Generally we should have hit one of the cases above, but it's possible\n // to get here if there were only metadata docChanges and they got\n // stripped out.\n return false;\n }\n\n private raiseInitialEvent(snap: ViewSnapshot): void {\n debugAssert(\n !this.raisedInitialEvent,\n 'Trying to raise initial events for second time'\n );\n snap = ViewSnapshot.fromInitialDocuments(\n snap.query,\n snap.docs,\n snap.mutatedKeys,\n snap.fromCache\n );\n this.raisedInitialEvent = true;\n this.queryObserver.next(snap);\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { QueryEngine } from './query_engine';\nimport { LocalDocumentsView } from './local_documents_view';\nimport { PersistenceTransaction } from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { LimitType, Query } from '../core/query';\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport {\n DocumentKeySet,\n DocumentMap,\n MaybeDocumentMap\n} from '../model/collections';\nimport { Document } from '../model/document';\nimport { debugAssert } from '../util/assert';\nimport { getLogLevel, LogLevel, logDebug } 
from '../util/log';\nimport { SortedSet } from '../util/sorted_set';\n\n// TODO(b/140938512): Drop SimpleQueryEngine and rename IndexFreeQueryEngine.\n\n/**\n * A query engine that takes advantage of the target document mapping in the\n * QueryCache. The IndexFreeQueryEngine optimizes query execution by only\n * reading the documents that previously matched a query plus any documents that were\n * edited after the query was last listened to.\n *\n * There are some cases where Index-Free queries are not guaranteed to produce\n * the same results as full collection scans. In these cases, the\n * IndexFreeQueryEngine falls back to full query processing. These cases are:\n *\n * - Limit queries where a document that matched the query previously no longer\n * matches the query.\n *\n * - Limit queries where a document edit may cause the document to sort below\n * another document that is in the local cache.\n *\n * - Queries that have never been CURRENT or free of Limbo documents.\n */\nexport class IndexFreeQueryEngine implements QueryEngine {\n private localDocumentsView: LocalDocumentsView | undefined;\n\n setLocalDocumentsView(localDocuments: LocalDocumentsView): void {\n this.localDocumentsView = localDocuments;\n }\n\n getDocumentsMatchingQuery(\n transaction: PersistenceTransaction,\n query: Query,\n lastLimboFreeSnapshotVersion: SnapshotVersion,\n remoteKeys: DocumentKeySet\n ): PersistencePromise<DocumentMap> {\n debugAssert(\n this.localDocumentsView !== undefined,\n 'setLocalDocumentsView() not called'\n );\n\n // Queries that match all documents don't benefit from using\n // IndexFreeQueries. It is more efficient to scan all documents in a\n // collection, rather than to perform individual lookups.\n if (query.matchesAllDocuments()) {\n return this.executeFullCollectionScan(transaction, query);\n }\n\n // Queries that have never seen a limbo-free snapshot should also be run\n // as a full collection scan.\n if (lastLimboFreeSnapshotVersion.isEqual(SnapshotVersion.min())) {\n return this.executeFullCollectionScan(transaction, query);\n }\n\n return this.localDocumentsView!.getDocuments(transaction, remoteKeys).next(\n documents => {\n const previousResults = this.applyQuery(query, documents);\n\n if (\n (query.hasLimitToFirst() || query.hasLimitToLast()) &&\n this.needsRefill(\n query.limitType,\n previousResults,\n remoteKeys,\n lastLimboFreeSnapshotVersion\n )\n ) {\n return this.executeFullCollectionScan(transaction, query);\n }\n\n if (getLogLevel() <= LogLevel.DEBUG) {\n logDebug(\n 'IndexFreeQueryEngine',\n 'Re-using previous result from %s to execute query: %s',\n lastLimboFreeSnapshotVersion.toString(),\n query.toString()\n );\n }\n\n // Retrieve all results for documents that were updated since the last\n // limbo-document free remote snapshot.\n return this.localDocumentsView!.getDocumentsMatchingQuery(\n transaction,\n query,\n lastLimboFreeSnapshotVersion\n ).next(updatedResults => {\n // We merge `previousResults` into `updatedResults`, since\n // `updatedResults` is already a DocumentMap. If a document is\n // contained in both lists, then its contents are the same.\n previousResults.forEach(doc => {\n updatedResults = updatedResults.insert(doc.key, doc);\n });\n return updatedResults;\n });\n }\n );\n }\n\n /** Applies the query filter and sorting to the provided documents. 
*/\n private applyQuery(\n query: Query,\n documents: MaybeDocumentMap\n ): SortedSet<Document> {\n // Sort the documents and re-apply the query filter since previously\n // matching documents do not necessarily still match the query.\n let queryResults = new SortedSet<Document>((d1, d2) =>\n query.docComparator(d1, d2)\n );\n documents.forEach((_, maybeDoc) => {\n if (maybeDoc instanceof Document && query.matches(maybeDoc)) {\n queryResults = queryResults.add(maybeDoc);\n }\n });\n return queryResults;\n }\n\n /**\n * Determines if a limit query needs to be refilled from cache, making it\n * ineligible for index-free execution.\n *\n * @param sortedPreviousResults The documents that matched the query when it\n * was last synchronized, sorted by the query's comparator.\n * @param remoteKeys The document keys that matched the query at the last\n * snapshot.\n * @param limboFreeSnapshotVersion The version of the snapshot when the query\n * was last synchronized.\n */\n private needsRefill(\n limitType: LimitType,\n sortedPreviousResults: SortedSet<Document>,\n remoteKeys: DocumentKeySet,\n limboFreeSnapshotVersion: SnapshotVersion\n ): boolean {\n // The query needs to be refilled if a previously matching document no\n // longer matches.\n if (remoteKeys.size !== sortedPreviousResults.size) {\n return true;\n }\n\n // Limit queries are not eligible for index-free query execution if there is\n // a potential that an older document from cache now sorts before a document\n // that was previously part of the limit. This, however, can only happen if\n // the document at the edge of the limit goes out of limit.\n // If a document that is not the limit boundary sorts differently,\n // the boundary of the limit itself did not change and documents from cache\n // will continue to be \"rejected\" by this boundary. Therefore, we can ignore\n // any modifications that don't affect the last document.\n const docAtLimitEdge =\n limitType === LimitType.First\n ? 
sortedPreviousResults.last()\n : sortedPreviousResults.first();\n if (!docAtLimitEdge) {\n // We don't need to refill the query if there were already no documents.\n return false;\n }\n return (\n docAtLimitEdge.hasPendingWrites ||\n docAtLimitEdge.version.compareTo(limboFreeSnapshotVersion) > 0\n );\n }\n\n private executeFullCollectionScan(\n transaction: PersistenceTransaction,\n query: Query\n ): PersistencePromise<DocumentMap> {\n if (getLogLevel() <= LogLevel.DEBUG) {\n logDebug(\n 'IndexFreeQueryEngine',\n 'Using full collection scan to execute query:',\n query.toString()\n );\n }\n\n return this.localDocumentsView!.getDocumentsMatchingQuery(\n transaction,\n query,\n SnapshotVersion.min()\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Timestamp } from '../api/timestamp';\nimport { Query } from '../core/query';\nimport { BatchId } from '../core/types';\nimport { DocumentKey } from '../model/document_key';\nimport { Mutation } from '../model/mutation';\nimport { MutationBatch, BATCHID_UNKNOWN } from '../model/mutation_batch';\nimport { debugAssert, hardAssert } from '../util/assert';\nimport { primitiveComparator } from '../util/misc';\nimport { ByteString } from '../util/byte_string';\nimport { SortedMap } from '../util/sorted_map';\nimport { SortedSet } from '../util/sorted_set';\n\nimport { IndexManager } from './index_manager';\nimport { MutationQueue } from './mutation_queue';\nimport { PersistenceTransaction, ReferenceDelegate } from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { DocReference } from './reference_set';\n\nexport class MemoryMutationQueue implements MutationQueue {\n /**\n * The set of all mutations that have been sent but not yet been applied to\n * the backend.\n */\n private mutationQueue: MutationBatch[] = [];\n\n /** Next value to use when assigning sequential IDs to each mutation batch. */\n private nextBatchId: BatchId = 1;\n\n /** The last received stream token from the server, used to acknowledge which\n * responses the client has processed. Stream tokens are opaque checkpoint\n * markers whose only real value is their inclusion in the next request.\n */\n private lastStreamToken: ByteString = ByteString.EMPTY_BYTE_STRING;\n\n /** An ordered mapping between documents and the mutations batch IDs. 
*/\n private batchesByDocumentKey = new SortedSet(DocReference.compareByKey);\n\n constructor(\n private readonly indexManager: IndexManager,\n private readonly referenceDelegate: ReferenceDelegate\n ) {}\n\n checkEmpty(transaction: PersistenceTransaction): PersistencePromise<boolean> {\n return PersistencePromise.resolve(this.mutationQueue.length === 0);\n }\n\n acknowledgeBatch(\n transaction: PersistenceTransaction,\n batch: MutationBatch,\n streamToken: ByteString\n ): PersistencePromise<void> {\n const batchId = batch.batchId;\n const batchIndex = this.indexOfExistingBatchId(batchId, 'acknowledged');\n hardAssert(\n batchIndex === 0,\n 'Can only acknowledge the first batch in the mutation queue'\n );\n\n // Verify that the batch in the queue is the one to be acknowledged.\n const check = this.mutationQueue[batchIndex];\n debugAssert(\n batchId === check.batchId,\n 'Queue ordering failure: expected batch ' +\n batchId +\n ', got batch ' +\n check.batchId\n );\n\n this.lastStreamToken = streamToken;\n return PersistencePromise.resolve();\n }\n\n getLastStreamToken(\n transaction: PersistenceTransaction\n ): PersistencePromise<ByteString> {\n return PersistencePromise.resolve(this.lastStreamToken);\n }\n\n setLastStreamToken(\n transaction: PersistenceTransaction,\n streamToken: ByteString\n ): PersistencePromise<void> {\n this.lastStreamToken = streamToken;\n return PersistencePromise.resolve();\n }\n\n addMutationBatch(\n transaction: PersistenceTransaction,\n localWriteTime: Timestamp,\n baseMutations: Mutation[],\n mutations: Mutation[]\n ): PersistencePromise<MutationBatch> {\n debugAssert(mutations.length !== 0, 'Mutation batches should not be empty');\n\n const batchId = this.nextBatchId;\n this.nextBatchId++;\n\n if (this.mutationQueue.length > 0) {\n const prior = this.mutationQueue[this.mutationQueue.length - 1];\n debugAssert(\n prior.batchId < batchId,\n 'Mutation batchIDs must be monotonically increasing order'\n );\n }\n\n const batch = new MutationBatch(\n batchId,\n localWriteTime,\n baseMutations,\n mutations\n );\n this.mutationQueue.push(batch);\n\n // Track references by document key and index collection parents.\n for (const mutation of mutations) {\n this.batchesByDocumentKey = this.batchesByDocumentKey.add(\n new DocReference(mutation.key, batchId)\n );\n\n this.indexManager.addToCollectionParentIndex(\n transaction,\n mutation.key.path.popLast()\n );\n }\n\n return PersistencePromise.resolve(batch);\n }\n\n lookupMutationBatch(\n transaction: PersistenceTransaction,\n batchId: BatchId\n ): PersistencePromise<MutationBatch | null> {\n return PersistencePromise.resolve(this.findMutationBatch(batchId));\n }\n\n getNextMutationBatchAfterBatchId(\n transaction: PersistenceTransaction,\n batchId: BatchId\n ): PersistencePromise<MutationBatch | null> {\n const nextBatchId = batchId + 1;\n\n // The requested batchId may still be out of range so normalize it to the\n // start of the queue.\n const rawIndex = this.indexOfBatchId(nextBatchId);\n const index = rawIndex < 0 ? 0 : rawIndex;\n return PersistencePromise.resolve(\n this.mutationQueue.length > index ? this.mutationQueue[index] : null\n );\n }\n\n getHighestUnacknowledgedBatchId(): PersistencePromise<BatchId> {\n return PersistencePromise.resolve(\n this.mutationQueue.length === 0 ? 
BATCHID_UNKNOWN : this.nextBatchId - 1\n );\n }\n\n getAllMutationBatches(\n transaction: PersistenceTransaction\n ): PersistencePromise<MutationBatch[]> {\n return PersistencePromise.resolve(this.mutationQueue.slice());\n }\n\n getAllMutationBatchesAffectingDocumentKey(\n transaction: PersistenceTransaction,\n documentKey: DocumentKey\n ): PersistencePromise<MutationBatch[]> {\n const start = new DocReference(documentKey, 0);\n const end = new DocReference(documentKey, Number.POSITIVE_INFINITY);\n const result: MutationBatch[] = [];\n this.batchesByDocumentKey.forEachInRange([start, end], ref => {\n debugAssert(\n documentKey.isEqual(ref.key),\n \"Should only iterate over a single key's batches\"\n );\n const batch = this.findMutationBatch(ref.targetOrBatchId);\n debugAssert(\n batch !== null,\n 'Batches in the index must exist in the main table'\n );\n result.push(batch!);\n });\n\n return PersistencePromise.resolve(result);\n }\n\n getAllMutationBatchesAffectingDocumentKeys(\n transaction: PersistenceTransaction,\n documentKeys: SortedMap<DocumentKey, unknown>\n ): PersistencePromise<MutationBatch[]> {\n let uniqueBatchIDs = new SortedSet<number>(primitiveComparator);\n\n documentKeys.forEach(documentKey => {\n const start = new DocReference(documentKey, 0);\n const end = new DocReference(documentKey, Number.POSITIVE_INFINITY);\n this.batchesByDocumentKey.forEachInRange([start, end], ref => {\n debugAssert(\n documentKey.isEqual(ref.key),\n \"For each key, should only iterate over a single key's batches\"\n );\n\n uniqueBatchIDs = uniqueBatchIDs.add(ref.targetOrBatchId);\n });\n });\n\n return PersistencePromise.resolve(this.findMutationBatches(uniqueBatchIDs));\n }\n\n getAllMutationBatchesAffectingQuery(\n transaction: PersistenceTransaction,\n query: Query\n ): PersistencePromise<MutationBatch[]> {\n debugAssert(\n !query.isCollectionGroupQuery(),\n 'CollectionGroup queries should be handled in LocalDocumentsView'\n );\n // Use the query path as a prefix for testing if a document matches the\n // query.\n const prefix = query.path;\n const immediateChildrenPathLength = prefix.length + 1;\n\n // Construct a document reference for actually scanning the index. Unlike\n // the prefix the document key in this reference must have an even number of\n // segments. The empty segment can be used a suffix of the query path\n // because it precedes all other segments in an ordered traversal.\n let startPath = prefix;\n if (!DocumentKey.isDocumentKey(startPath)) {\n startPath = startPath.child('');\n }\n\n const start = new DocReference(new DocumentKey(startPath), 0);\n\n // Find unique batchIDs referenced by all documents potentially matching the\n // query.\n let uniqueBatchIDs = new SortedSet<number>(primitiveComparator);\n\n this.batchesByDocumentKey.forEachWhile(ref => {\n const rowKeyPath = ref.key.path;\n if (!prefix.isPrefixOf(rowKeyPath)) {\n return false;\n } else {\n // Rows with document keys more than one segment longer than the query\n // path can't be matches. 
For example, a query on 'rooms' can't match\n // the document /rooms/abc/messages/xyx.\n // TODO(mcg): we'll need a different scanner when we implement\n // ancestor queries.\n if (rowKeyPath.length === immediateChildrenPathLength) {\n uniqueBatchIDs = uniqueBatchIDs.add(ref.targetOrBatchId);\n }\n return true;\n }\n }, start);\n\n return PersistencePromise.resolve(this.findMutationBatches(uniqueBatchIDs));\n }\n\n private findMutationBatches(batchIDs: SortedSet<number>): MutationBatch[] {\n // Construct an array of matching batches, sorted by batchID to ensure that\n // multiple mutations affecting the same document key are applied in order.\n const result: MutationBatch[] = [];\n batchIDs.forEach(batchId => {\n const batch = this.findMutationBatch(batchId);\n if (batch !== null) {\n result.push(batch);\n }\n });\n return result;\n }\n\n removeMutationBatch(\n transaction: PersistenceTransaction,\n batch: MutationBatch\n ): PersistencePromise<void> {\n // Find the position of the first batch for removal.\n const batchIndex = this.indexOfExistingBatchId(batch.batchId, 'removed');\n hardAssert(\n batchIndex === 0,\n 'Can only remove the first entry of the mutation queue'\n );\n this.mutationQueue.shift();\n\n let references = this.batchesByDocumentKey;\n return PersistencePromise.forEach(batch.mutations, (mutation: Mutation) => {\n const ref = new DocReference(mutation.key, batch.batchId);\n references = references.delete(ref);\n return this.referenceDelegate.markPotentiallyOrphaned(\n transaction,\n mutation.key\n );\n }).next(() => {\n this.batchesByDocumentKey = references;\n });\n }\n\n removeCachedMutationKeys(batchId: BatchId): void {\n // No-op since the memory mutation queue does not maintain a separate cache.\n }\n\n containsKey(\n txn: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<boolean> {\n const ref = new DocReference(key, 0);\n const firstRef = this.batchesByDocumentKey.firstAfterOrEqual(ref);\n return PersistencePromise.resolve(key.isEqual(firstRef && firstRef.key));\n }\n\n performConsistencyCheck(\n txn: PersistenceTransaction\n ): PersistencePromise<void> {\n if (this.mutationQueue.length === 0) {\n debugAssert(\n this.batchesByDocumentKey.isEmpty(),\n 'Document leak -- detected dangling mutation references when queue is empty.'\n );\n }\n return PersistencePromise.resolve();\n }\n\n /**\n * Finds the index of the given batchId in the mutation queue and asserts that\n * the resulting index is within the bounds of the queue.\n *\n * @param batchId The batchId to search for\n * @param action A description of what the caller is doing, phrased in passive\n * form (e.g. \"acknowledged\" in a routine that acknowledges batches).\n */\n private indexOfExistingBatchId(batchId: BatchId, action: string): number {\n const index = this.indexOfBatchId(batchId);\n debugAssert(\n index >= 0 && index < this.mutationQueue.length,\n 'Batches must exist to be ' + action\n );\n return index;\n }\n\n /**\n * Finds the index of the given batchId in the mutation queue. This operation\n * is O(1).\n *\n * @return The computed index of the batch with the given batchId, based on\n * the state of the queue. 
Note this index can be negative if the requested\n * batchId has already been remvoed from the queue or past the end of the\n * queue if the batchId is larger than the last added batch.\n */\n private indexOfBatchId(batchId: BatchId): number {\n if (this.mutationQueue.length === 0) {\n // As an index this is past the end of the queue\n return 0;\n }\n\n // Examine the front of the queue to figure out the difference between the\n // batchId and indexes in the array. Note that since the queue is ordered\n // by batchId, if the first batch has a larger batchId then the requested\n // batchId doesn't exist in the queue.\n const firstBatchId = this.mutationQueue[0].batchId;\n return batchId - firstBatchId;\n }\n\n /**\n * A version of lookupMutationBatch that doesn't return a promise, this makes\n * other functions that uses this code easier to read and more efficent.\n */\n private findMutationBatch(batchId: BatchId): MutationBatch | null {\n const index = this.indexOfBatchId(batchId);\n if (index < 0 || index >= this.mutationQueue.length) {\n return null;\n }\n\n const batch = this.mutationQueue[index];\n debugAssert(batch.batchId === batchId, 'If found batch must match');\n return batch;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Query } from '../core/query';\nimport {\n DocumentKeySet,\n DocumentMap,\n documentMap,\n DocumentSizeEntry,\n NullableMaybeDocumentMap,\n nullableMaybeDocumentMap\n} from '../model/collections';\nimport { Document, MaybeDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { debugAssert } from '../util/assert';\nimport { SortedMap } from '../util/sorted_map';\nimport { IndexManager } from './index_manager';\nimport { PersistenceTransaction } from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { RemoteDocumentCache } from './remote_document_cache';\nimport { RemoteDocumentChangeBuffer } from './remote_document_change_buffer';\n\nexport type DocumentSizer = (doc: MaybeDocument) => number;\n\n/** Miscellaneous collection types / constants. */\ninterface MemoryRemoteDocumentCacheEntry extends DocumentSizeEntry {\n readTime: SnapshotVersion;\n}\n\ntype DocumentEntryMap = SortedMap<DocumentKey, MemoryRemoteDocumentCacheEntry>;\nfunction documentEntryMap(): DocumentEntryMap {\n return new SortedMap<DocumentKey, MemoryRemoteDocumentCacheEntry>(\n DocumentKey.comparator\n );\n}\n\nexport class MemoryRemoteDocumentCache implements RemoteDocumentCache {\n /** Underlying cache of documents and their read times. */\n private docs = documentEntryMap();\n\n /** Size of all cached documents. */\n private size = 0;\n\n /**\n * @param sizer Used to assess the size of a document. 
For eager GC, this is expected to just\n * return 0 to avoid unnecessarily doing the work of calculating the size.\n */\n constructor(\n private readonly indexManager: IndexManager,\n private readonly sizer: DocumentSizer\n ) {}\n\n /**\n * Adds the supplied entry to the cache and updates the cache size as appropriate.\n *\n * All calls of `addEntry` are required to go through the RemoteDocumentChangeBuffer\n * returned by `newChangeBuffer()`.\n */\n private addEntry(\n transaction: PersistenceTransaction,\n doc: MaybeDocument,\n readTime: SnapshotVersion\n ): PersistencePromise<void> {\n debugAssert(\n !readTime.isEqual(SnapshotVersion.min()),\n 'Cannot add a document with a read time of zero'\n );\n\n const key = doc.key;\n const entry = this.docs.get(key);\n const previousSize = entry ? entry.size : 0;\n const currentSize = this.sizer(doc);\n\n this.docs = this.docs.insert(key, {\n maybeDocument: doc,\n size: currentSize,\n readTime\n });\n\n this.size += currentSize - previousSize;\n\n return this.indexManager.addToCollectionParentIndex(\n transaction,\n key.path.popLast()\n );\n }\n\n /**\n * Removes the specified entry from the cache and updates the cache size as appropriate.\n *\n * All calls of `removeEntry` are required to go through the RemoteDocumentChangeBuffer\n * returned by `newChangeBuffer()`.\n */\n private removeEntry(documentKey: DocumentKey): void {\n const entry = this.docs.get(documentKey);\n if (entry) {\n this.docs = this.docs.remove(documentKey);\n this.size -= entry.size;\n }\n }\n\n getEntry(\n transaction: PersistenceTransaction,\n documentKey: DocumentKey\n ): PersistencePromise<MaybeDocument | null> {\n const entry = this.docs.get(documentKey);\n return PersistencePromise.resolve(entry ? entry.maybeDocument : null);\n }\n\n getEntries(\n transaction: PersistenceTransaction,\n documentKeys: DocumentKeySet\n ): PersistencePromise<NullableMaybeDocumentMap> {\n let results = nullableMaybeDocumentMap();\n documentKeys.forEach(documentKey => {\n const entry = this.docs.get(documentKey);\n results = results.insert(documentKey, entry ? 
entry.maybeDocument : null);\n });\n return PersistencePromise.resolve(results);\n }\n\n getDocumentsMatchingQuery(\n transaction: PersistenceTransaction,\n query: Query,\n sinceReadTime: SnapshotVersion\n ): PersistencePromise<DocumentMap> {\n debugAssert(\n !query.isCollectionGroupQuery(),\n 'CollectionGroup queries should be handled in LocalDocumentsView'\n );\n let results = documentMap();\n\n // Documents are ordered by key, so we can use a prefix scan to narrow down\n // the documents we need to match the query against.\n const prefix = new DocumentKey(query.path.child(''));\n const iterator = this.docs.getIteratorFrom(prefix);\n while (iterator.hasNext()) {\n const {\n key,\n value: { maybeDocument, readTime }\n } = iterator.getNext();\n if (!query.path.isPrefixOf(key.path)) {\n break;\n }\n if (readTime.compareTo(sinceReadTime) <= 0) {\n continue;\n }\n if (maybeDocument instanceof Document && query.matches(maybeDocument)) {\n results = results.insert(maybeDocument.key, maybeDocument);\n }\n }\n return PersistencePromise.resolve(results);\n }\n\n forEachDocumentKey(\n transaction: PersistenceTransaction,\n f: (key: DocumentKey) => PersistencePromise<void>\n ): PersistencePromise<void> {\n return PersistencePromise.forEach(this.docs, (key: DocumentKey) => f(key));\n }\n\n newChangeBuffer(options?: {\n trackRemovals: boolean;\n }): RemoteDocumentChangeBuffer {\n // `trackRemovals` is ignores since the MemoryRemoteDocumentCache keeps\n // a separate changelog and does not need special handling for removals.\n return new MemoryRemoteDocumentCache.RemoteDocumentChangeBuffer(this);\n }\n\n getSize(txn: PersistenceTransaction): PersistencePromise<number> {\n return PersistencePromise.resolve(this.size);\n }\n\n /**\n * Handles the details of adding and updating documents in the MemoryRemoteDocumentCache.\n */\n private static RemoteDocumentChangeBuffer = class extends RemoteDocumentChangeBuffer {\n constructor(private readonly documentCache: MemoryRemoteDocumentCache) {\n super();\n }\n\n protected applyChanges(\n transaction: PersistenceTransaction\n ): PersistencePromise<void> {\n const promises: Array<PersistencePromise<void>> = [];\n this.changes.forEach((key, doc) => {\n if (doc) {\n promises.push(\n this.documentCache.addEntry(transaction, doc, this.readTime)\n );\n } else {\n this.documentCache.removeEntry(key);\n }\n });\n return PersistencePromise.waitFor(promises);\n }\n\n protected getFromCache(\n transaction: PersistenceTransaction,\n documentKey: DocumentKey\n ): PersistencePromise<MaybeDocument | null> {\n return this.documentCache.getEntry(transaction, documentKey);\n }\n\n protected getAllFromCache(\n transaction: PersistenceTransaction,\n documentKeys: DocumentKeySet\n ): PersistencePromise<NullableMaybeDocumentMap> {\n return this.documentCache.getEntries(transaction, documentKeys);\n }\n };\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DocumentKeySet, NullableMaybeDocumentMap } from 
'../model/collections';\nimport { MaybeDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { debugAssert } from '../util/assert';\nimport { ObjectMap } from '../util/obj_map';\n\nimport { PersistenceTransaction } from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { SnapshotVersion } from '../core/snapshot_version';\n\n/**\n * An in-memory buffer of entries to be written to a RemoteDocumentCache.\n * It can be used to batch up a set of changes to be written to the cache, but\n * additionally supports reading entries back with the `getEntry()` method,\n * falling back to the underlying RemoteDocumentCache if no entry is\n * buffered.\n *\n * Entries added to the cache *must* be read first. This is to facilitate\n * calculating the size delta of the pending changes.\n *\n * PORTING NOTE: This class was implemented then removed from other platforms.\n * If byte-counting ends up being needed on the other platforms, consider\n * porting this class as part of that implementation work.\n */\nexport abstract class RemoteDocumentChangeBuffer {\n // A mapping of document key to the new cache entry that should be written (or null if any\n // existing cache entry should be removed).\n protected changes: ObjectMap<\n DocumentKey,\n MaybeDocument | null\n > = new ObjectMap(key => key.toString());\n\n // The read time to use for all added documents in this change buffer.\n private _readTime: SnapshotVersion | undefined;\n\n private changesApplied = false;\n\n protected abstract getFromCache(\n transaction: PersistenceTransaction,\n documentKey: DocumentKey\n ): PersistencePromise<MaybeDocument | null>;\n\n protected abstract getAllFromCache(\n transaction: PersistenceTransaction,\n documentKeys: DocumentKeySet\n ): PersistencePromise<NullableMaybeDocumentMap>;\n\n protected abstract applyChanges(\n transaction: PersistenceTransaction\n ): PersistencePromise<void>;\n\n protected set readTime(value: SnapshotVersion) {\n // Right now (for simplicity) we just track a single readTime for all the\n // added entries since we expect them to all be the same, but we could\n // rework to store per-entry readTimes if necessary.\n debugAssert(\n this._readTime === undefined || this._readTime.isEqual(value),\n 'All changes in a RemoteDocumentChangeBuffer must have the same read time'\n );\n this._readTime = value;\n }\n\n protected get readTime(): SnapshotVersion {\n debugAssert(\n this._readTime !== undefined,\n 'Read time is not set. All removeEntry() calls must include a readTime if `trackRemovals` is used.'\n );\n return this._readTime;\n }\n\n /**\n * Buffers a `RemoteDocumentCache.addEntry()` call.\n *\n * You can only modify documents that have already been retrieved via\n * `getEntry()/getEntries()` (enforced via IndexedDbs `apply()`).\n */\n addEntry(maybeDocument: MaybeDocument, readTime: SnapshotVersion): void {\n this.assertNotApplied();\n this.readTime = readTime;\n this.changes.set(maybeDocument.key, maybeDocument);\n }\n\n /**\n * Buffers a `RemoteDocumentCache.removeEntry()` call.\n *\n * You can only remove documents that have already been retrieved via\n * `getEntry()/getEntries()` (enforced via IndexedDbs `apply()`).\n */\n removeEntry(key: DocumentKey, readTime?: SnapshotVersion): void {\n this.assertNotApplied();\n if (readTime) {\n this.readTime = readTime;\n }\n this.changes.set(key, null);\n }\n\n /**\n * Looks up an entry in the cache. 
The buffered changes will first be checked,\n * and if no buffered change applies, this will forward to\n * `RemoteDocumentCache.getEntry()`.\n *\n * @param transaction The transaction in which to perform any persistence\n * operations.\n * @param documentKey The key of the entry to look up.\n * @return The cached Document or NoDocument entry, or null if we have nothing\n * cached.\n */\n getEntry(\n transaction: PersistenceTransaction,\n documentKey: DocumentKey\n ): PersistencePromise<MaybeDocument | null> {\n this.assertNotApplied();\n const bufferedEntry = this.changes.get(documentKey);\n if (bufferedEntry !== undefined) {\n return PersistencePromise.resolve<MaybeDocument | null>(bufferedEntry);\n } else {\n return this.getFromCache(transaction, documentKey);\n }\n }\n\n /**\n * Looks up several entries in the cache, forwarding to\n * `RemoteDocumentCache.getEntry()`.\n *\n * @param transaction The transaction in which to perform any persistence\n * operations.\n * @param documentKeys The keys of the entries to look up.\n * @return A map of cached `Document`s or `NoDocument`s, indexed by key. If an\n * entry cannot be found, the corresponding key will be mapped to a null\n * value.\n */\n getEntries(\n transaction: PersistenceTransaction,\n documentKeys: DocumentKeySet\n ): PersistencePromise<NullableMaybeDocumentMap> {\n return this.getAllFromCache(transaction, documentKeys);\n }\n\n /**\n * Applies buffered changes to the underlying RemoteDocumentCache, using\n * the provided transaction.\n */\n apply(transaction: PersistenceTransaction): PersistencePromise<void> {\n this.assertNotApplied();\n this.changesApplied = true;\n return this.applyChanges(transaction);\n }\n\n /** Helper to assert this.changes is not null */\n protected assertNotApplied(): void {\n debugAssert(!this.changesApplied, 'Changes have already been applied.');\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SnapshotVersion } from '../core/snapshot_version';\nimport { TargetIdGenerator } from '../core/target_id_generator';\nimport { ListenSequenceNumber, TargetId } from '../core/types';\nimport { DocumentKeySet } from '../model/collections';\nimport { DocumentKey } from '../model/document_key';\nimport { debugAssert } from '../util/assert';\nimport { ObjectMap } from '../util/obj_map';\n\nimport { ActiveTargets } from './lru_garbage_collector';\nimport { MemoryPersistence } from './memory_persistence';\nimport { PersistenceTransaction } from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { ReferenceSet } from './reference_set';\nimport { TargetCache } from './target_cache';\nimport { TargetData } from './target_data';\nimport { Target } from '../core/target';\n\nexport class MemoryTargetCache implements TargetCache {\n /**\n * Maps a target to the data about that target\n */\n private targets = new ObjectMap<Target, TargetData>(t => t.canonicalId());\n\n /** The last received snapshot 
version. */\n private lastRemoteSnapshotVersion = SnapshotVersion.min();\n /** The highest numbered target ID encountered. */\n private highestTargetId: TargetId = 0;\n /** The highest sequence number encountered. */\n private highestSequenceNumber: ListenSequenceNumber = 0;\n /**\n * A ordered bidirectional mapping between documents and the remote target\n * IDs.\n */\n private references = new ReferenceSet();\n\n private targetCount = 0;\n\n private targetIdGenerator = TargetIdGenerator.forTargetCache();\n\n constructor(private readonly persistence: MemoryPersistence) {}\n\n forEachTarget(\n txn: PersistenceTransaction,\n f: (q: TargetData) => void\n ): PersistencePromise<void> {\n this.targets.forEach((_, targetData) => f(targetData));\n return PersistencePromise.resolve();\n }\n\n getLastRemoteSnapshotVersion(\n transaction: PersistenceTransaction\n ): PersistencePromise<SnapshotVersion> {\n return PersistencePromise.resolve(this.lastRemoteSnapshotVersion);\n }\n\n getHighestSequenceNumber(\n transaction: PersistenceTransaction\n ): PersistencePromise<ListenSequenceNumber> {\n return PersistencePromise.resolve(this.highestSequenceNumber);\n }\n\n allocateTargetId(\n transaction: PersistenceTransaction\n ): PersistencePromise<TargetId> {\n this.highestTargetId = this.targetIdGenerator.next();\n return PersistencePromise.resolve(this.highestTargetId);\n }\n\n setTargetsMetadata(\n transaction: PersistenceTransaction,\n highestListenSequenceNumber: number,\n lastRemoteSnapshotVersion?: SnapshotVersion\n ): PersistencePromise<void> {\n if (lastRemoteSnapshotVersion) {\n this.lastRemoteSnapshotVersion = lastRemoteSnapshotVersion;\n }\n if (highestListenSequenceNumber > this.highestSequenceNumber) {\n this.highestSequenceNumber = highestListenSequenceNumber;\n }\n return PersistencePromise.resolve();\n }\n\n private saveTargetData(targetData: TargetData): void {\n this.targets.set(targetData.target, targetData);\n const targetId = targetData.targetId;\n if (targetId > this.highestTargetId) {\n this.targetIdGenerator = new TargetIdGenerator(targetId);\n this.highestTargetId = targetId;\n }\n if (targetData.sequenceNumber > this.highestSequenceNumber) {\n this.highestSequenceNumber = targetData.sequenceNumber;\n }\n }\n\n addTargetData(\n transaction: PersistenceTransaction,\n targetData: TargetData\n ): PersistencePromise<void> {\n debugAssert(\n !this.targets.has(targetData.target),\n 'Adding a target that already exists'\n );\n this.saveTargetData(targetData);\n this.targetCount += 1;\n return PersistencePromise.resolve();\n }\n\n updateTargetData(\n transaction: PersistenceTransaction,\n targetData: TargetData\n ): PersistencePromise<void> {\n debugAssert(\n this.targets.has(targetData.target),\n 'Updating a non-existent target'\n );\n this.saveTargetData(targetData);\n return PersistencePromise.resolve();\n }\n\n removeTargetData(\n transaction: PersistenceTransaction,\n targetData: TargetData\n ): PersistencePromise<void> {\n debugAssert(this.targetCount > 0, 'Removing a target from an empty cache');\n debugAssert(\n this.targets.has(targetData.target),\n 'Removing a non-existent target from the cache'\n );\n this.targets.delete(targetData.target);\n this.references.removeReferencesForId(targetData.targetId);\n this.targetCount -= 1;\n return PersistencePromise.resolve();\n }\n\n removeTargets(\n transaction: PersistenceTransaction,\n upperBound: ListenSequenceNumber,\n activeTargetIds: ActiveTargets\n ): PersistencePromise<number> {\n let count = 0;\n const removals: 
Array<PersistencePromise<void>> = [];\n this.targets.forEach((key, targetData) => {\n if (\n targetData.sequenceNumber <= upperBound &&\n activeTargetIds.get(targetData.targetId) === null\n ) {\n this.targets.delete(key);\n removals.push(\n this.removeMatchingKeysForTargetId(transaction, targetData.targetId)\n );\n count++;\n }\n });\n return PersistencePromise.waitFor(removals).next(() => count);\n }\n\n getTargetCount(\n transaction: PersistenceTransaction\n ): PersistencePromise<number> {\n return PersistencePromise.resolve(this.targetCount);\n }\n\n getTargetData(\n transaction: PersistenceTransaction,\n target: Target\n ): PersistencePromise<TargetData | null> {\n const targetData = this.targets.get(target) || null;\n return PersistencePromise.resolve(targetData);\n }\n\n addMatchingKeys(\n txn: PersistenceTransaction,\n keys: DocumentKeySet,\n targetId: TargetId\n ): PersistencePromise<void> {\n this.references.addReferences(keys, targetId);\n return PersistencePromise.resolve();\n }\n\n removeMatchingKeys(\n txn: PersistenceTransaction,\n keys: DocumentKeySet,\n targetId: TargetId\n ): PersistencePromise<void> {\n this.references.removeReferences(keys, targetId);\n const referenceDelegate = this.persistence.referenceDelegate;\n const promises: Array<PersistencePromise<void>> = [];\n if (referenceDelegate) {\n keys.forEach(key => {\n promises.push(referenceDelegate.markPotentiallyOrphaned(txn, key));\n });\n }\n return PersistencePromise.waitFor(promises);\n }\n\n removeMatchingKeysForTargetId(\n txn: PersistenceTransaction,\n targetId: TargetId\n ): PersistencePromise<void> {\n this.references.removeReferencesForId(targetId);\n return PersistencePromise.resolve();\n }\n\n getMatchingKeysForTargetId(\n txn: PersistenceTransaction,\n targetId: TargetId\n ): PersistencePromise<DocumentKeySet> {\n const matchingKeys = this.references.referencesForId(targetId);\n return PersistencePromise.resolve(matchingKeys);\n }\n\n containsKey(\n txn: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<boolean> {\n return PersistencePromise.resolve(this.references.containsKey(key));\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { User } from '../auth/user';\nimport { Document, MaybeDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { fail } from '../util/assert';\nimport { logDebug } from '../util/log';\nimport { ObjectMap } from '../util/obj_map';\nimport { encodeResourcePath } from './encoded_resource_path';\nimport {\n ActiveTargets,\n LruDelegate,\n LruGarbageCollector,\n LruParams\n} from './lru_garbage_collector';\nimport { ListenSequence } from '../core/listen_sequence';\nimport { ListenSequenceNumber, TargetId } from '../core/types';\nimport { estimateByteSize } from '../model/values';\nimport { MemoryIndexManager } from './memory_index_manager';\nimport { MemoryMutationQueue } from './memory_mutation_queue';\nimport { MemoryRemoteDocumentCache 
} from './memory_remote_document_cache';\nimport { MemoryTargetCache } from './memory_target_cache';\nimport { MutationQueue } from './mutation_queue';\nimport {\n Persistence,\n PersistenceTransaction,\n PersistenceTransactionMode,\n ReferenceDelegate\n} from './persistence';\nimport { PersistencePromise } from './persistence_promise';\nimport { ReferenceSet } from './reference_set';\nimport { TargetData } from './target_data';\n\nconst LOG_TAG = 'MemoryPersistence';\n/**\n * A memory-backed instance of Persistence. Data is stored only in RAM and\n * not persisted across sessions.\n */\nexport class MemoryPersistence implements Persistence {\n /**\n * Note that these are retained here to make it easier to write tests\n * affecting both the in-memory and IndexedDB-backed persistence layers. Tests\n * can create a new LocalStore wrapping this Persistence instance and this\n * will make the in-memory persistence layer behave as if it were actually\n * persisting values.\n */\n private readonly indexManager: MemoryIndexManager;\n private mutationQueues: { [user: string]: MemoryMutationQueue } = {};\n private readonly remoteDocumentCache: MemoryRemoteDocumentCache;\n private readonly targetCache: MemoryTargetCache;\n private readonly listenSequence = new ListenSequence(0);\n\n private _started = false;\n\n readonly referenceDelegate: MemoryReferenceDelegate;\n\n /**\n * The constructor accepts a factory for creating a reference delegate. This\n * allows both the delegate and this instance to have strong references to\n * each other without having nullable fields that would then need to be\n * checked or asserted on every access.\n */\n constructor(\n referenceDelegateFactory: (p: MemoryPersistence) => MemoryReferenceDelegate\n ) {\n this._started = true;\n this.referenceDelegate = referenceDelegateFactory(this);\n this.targetCache = new MemoryTargetCache(this);\n const sizer = (doc: MaybeDocument): number =>\n this.referenceDelegate.documentSize(doc);\n this.indexManager = new MemoryIndexManager();\n this.remoteDocumentCache = new MemoryRemoteDocumentCache(\n this.indexManager,\n sizer\n );\n }\n\n start(): Promise<void> {\n return Promise.resolve();\n }\n\n shutdown(): Promise<void> {\n // No durable state to ensure is closed on shutdown.\n this._started = false;\n return Promise.resolve();\n }\n\n get started(): boolean {\n return this._started;\n }\n\n setDatabaseDeletedListener(): void {\n // No op.\n }\n\n getIndexManager(): MemoryIndexManager {\n return this.indexManager;\n }\n\n getMutationQueue(user: User): MutationQueue {\n let queue = this.mutationQueues[user.toKey()];\n if (!queue) {\n queue = new MemoryMutationQueue(\n this.indexManager,\n this.referenceDelegate\n );\n this.mutationQueues[user.toKey()] = queue;\n }\n return queue;\n }\n\n getTargetCache(): MemoryTargetCache {\n return this.targetCache;\n }\n\n getRemoteDocumentCache(): MemoryRemoteDocumentCache {\n return this.remoteDocumentCache;\n }\n\n runTransaction<T>(\n action: string,\n mode: PersistenceTransactionMode,\n transactionOperation: (\n transaction: PersistenceTransaction\n ) => PersistencePromise<T>\n ): Promise<T> {\n logDebug(LOG_TAG, 'Starting transaction:', action);\n const txn = new MemoryTransaction(this.listenSequence.next());\n this.referenceDelegate.onTransactionStarted();\n return transactionOperation(txn)\n .next(result => {\n return this.referenceDelegate\n .onTransactionCommitted(txn)\n .next(() => result);\n })\n .toPromise()\n .then(result => {\n txn.raiseOnCommittedEvent();\n return result;\n 
});\n }\n\n mutationQueuesContainKey(\n transaction: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<boolean> {\n return PersistencePromise.or(\n Object.values(this.mutationQueues).map(queue => () =>\n queue.containsKey(transaction, key)\n )\n );\n }\n}\n\n/**\n * Memory persistence is not actually transactional, but future implementations\n * may have transaction-scoped state.\n */\nexport class MemoryTransaction extends PersistenceTransaction {\n constructor(readonly currentSequenceNumber: ListenSequenceNumber) {\n super();\n }\n}\n\nexport interface MemoryReferenceDelegate extends ReferenceDelegate {\n documentSize(doc: MaybeDocument): number;\n onTransactionStarted(): void;\n onTransactionCommitted(txn: PersistenceTransaction): PersistencePromise<void>;\n}\n\nexport class MemoryEagerDelegate implements MemoryReferenceDelegate {\n /** Tracks all documents that are active in Query views. */\n private localViewReferences: ReferenceSet = new ReferenceSet();\n /** The list of documents that are potentially GCed after each transaction. */\n private _orphanedDocuments: Set<DocumentKey> | null = null;\n\n private constructor(private readonly persistence: MemoryPersistence) {}\n\n static factory(persistence: MemoryPersistence): MemoryEagerDelegate {\n return new MemoryEagerDelegate(persistence);\n }\n\n private get orphanedDocuments(): Set<DocumentKey> {\n if (!this._orphanedDocuments) {\n throw fail('orphanedDocuments is only valid during a transaction.');\n } else {\n return this._orphanedDocuments;\n }\n }\n\n addReference(\n txn: PersistenceTransaction,\n targetId: TargetId,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.localViewReferences.addReference(key, targetId);\n this.orphanedDocuments.delete(key);\n return PersistencePromise.resolve();\n }\n\n removeReference(\n txn: PersistenceTransaction,\n targetId: TargetId,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.localViewReferences.removeReference(key, targetId);\n this.orphanedDocuments.add(key);\n return PersistencePromise.resolve();\n }\n\n markPotentiallyOrphaned(\n txn: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.orphanedDocuments.add(key);\n return PersistencePromise.resolve();\n }\n\n removeTarget(\n txn: PersistenceTransaction,\n targetData: TargetData\n ): PersistencePromise<void> {\n const orphaned = this.localViewReferences.removeReferencesForId(\n targetData.targetId\n );\n orphaned.forEach(key => this.orphanedDocuments.add(key));\n const cache = this.persistence.getTargetCache();\n return cache\n .getMatchingKeysForTargetId(txn, targetData.targetId)\n .next(keys => {\n keys.forEach(key => this.orphanedDocuments.add(key));\n })\n .next(() => cache.removeTargetData(txn, targetData));\n }\n\n onTransactionStarted(): void {\n this._orphanedDocuments = new Set<DocumentKey>();\n }\n\n onTransactionCommitted(\n txn: PersistenceTransaction\n ): PersistencePromise<void> {\n // Remove newly orphaned documents.\n const cache = this.persistence.getRemoteDocumentCache();\n const changeBuffer = cache.newChangeBuffer();\n return PersistencePromise.forEach(\n this.orphanedDocuments,\n (key: DocumentKey) => {\n return this.isReferenced(txn, key).next(isReferenced => {\n if (!isReferenced) {\n changeBuffer.removeEntry(key);\n }\n });\n }\n ).next(() => {\n this._orphanedDocuments = null;\n return changeBuffer.apply(txn);\n });\n }\n\n updateLimboDocument(\n txn: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<void> {\n return 
this.isReferenced(txn, key).next(isReferenced => {\n if (isReferenced) {\n this.orphanedDocuments.delete(key);\n } else {\n this.orphanedDocuments.add(key);\n }\n });\n }\n\n documentSize(doc: MaybeDocument): number {\n // For eager GC, we don't care about the document size, there are no size thresholds.\n return 0;\n }\n\n private isReferenced(\n txn: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<boolean> {\n return PersistencePromise.or([\n () =>\n PersistencePromise.resolve(this.localViewReferences.containsKey(key)),\n () => this.persistence.getTargetCache().containsKey(txn, key),\n () => this.persistence.mutationQueuesContainKey(txn, key)\n ]);\n }\n}\n\nexport class MemoryLruDelegate implements ReferenceDelegate, LruDelegate {\n private orphanedSequenceNumbers: ObjectMap<\n DocumentKey,\n ListenSequenceNumber\n > = new ObjectMap(k => encodeResourcePath(k.path));\n\n readonly garbageCollector: LruGarbageCollector;\n\n constructor(\n private readonly persistence: MemoryPersistence,\n lruParams: LruParams\n ) {\n this.garbageCollector = new LruGarbageCollector(this, lruParams);\n }\n\n // No-ops, present so memory persistence doesn't have to care which delegate\n // it has.\n onTransactionStarted(): void {}\n\n onTransactionCommitted(\n txn: PersistenceTransaction\n ): PersistencePromise<void> {\n return PersistencePromise.resolve();\n }\n\n forEachTarget(\n txn: PersistenceTransaction,\n f: (q: TargetData) => void\n ): PersistencePromise<void> {\n return this.persistence.getTargetCache().forEachTarget(txn, f);\n }\n\n getSequenceNumberCount(\n txn: PersistenceTransaction\n ): PersistencePromise<number> {\n const docCountPromise = this.orphanedDocumentCount(txn);\n const targetCountPromise = this.persistence\n .getTargetCache()\n .getTargetCount(txn);\n return targetCountPromise.next(targetCount =>\n docCountPromise.next(docCount => targetCount + docCount)\n );\n }\n\n private orphanedDocumentCount(\n txn: PersistenceTransaction\n ): PersistencePromise<number> {\n let orphanedCount = 0;\n return this.forEachOrphanedDocumentSequenceNumber(txn, _ => {\n orphanedCount++;\n }).next(() => orphanedCount);\n }\n\n forEachOrphanedDocumentSequenceNumber(\n txn: PersistenceTransaction,\n f: (sequenceNumber: ListenSequenceNumber) => void\n ): PersistencePromise<void> {\n return PersistencePromise.forEach(\n this.orphanedSequenceNumbers,\n (key, sequenceNumber) => {\n // Pass in the exact sequence number as the upper bound so we know it won't be pinned by\n // being too recent.\n return this.isPinned(txn, key, sequenceNumber).next(isPinned => {\n if (!isPinned) {\n return f(sequenceNumber);\n } else {\n return PersistencePromise.resolve();\n }\n });\n }\n );\n }\n\n removeTargets(\n txn: PersistenceTransaction,\n upperBound: ListenSequenceNumber,\n activeTargetIds: ActiveTargets\n ): PersistencePromise<number> {\n return this.persistence\n .getTargetCache()\n .removeTargets(txn, upperBound, activeTargetIds);\n }\n\n removeOrphanedDocuments(\n txn: PersistenceTransaction,\n upperBound: ListenSequenceNumber\n ): PersistencePromise<number> {\n let count = 0;\n const cache = this.persistence.getRemoteDocumentCache();\n const changeBuffer = cache.newChangeBuffer();\n const p = cache.forEachDocumentKey(txn, key => {\n return this.isPinned(txn, key, upperBound).next(isPinned => {\n if (!isPinned) {\n count++;\n changeBuffer.removeEntry(key);\n }\n });\n });\n return p.next(() => changeBuffer.apply(txn)).next(() => count);\n }\n\n markPotentiallyOrphaned(\n txn: 
PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.orphanedSequenceNumbers.set(key, txn.currentSequenceNumber);\n return PersistencePromise.resolve();\n }\n\n removeTarget(\n txn: PersistenceTransaction,\n targetData: TargetData\n ): PersistencePromise<void> {\n const updated = targetData.withSequenceNumber(txn.currentSequenceNumber);\n return this.persistence.getTargetCache().updateTargetData(txn, updated);\n }\n\n addReference(\n txn: PersistenceTransaction,\n targetId: TargetId,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.orphanedSequenceNumbers.set(key, txn.currentSequenceNumber);\n return PersistencePromise.resolve();\n }\n\n removeReference(\n txn: PersistenceTransaction,\n targetId: TargetId,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.orphanedSequenceNumbers.set(key, txn.currentSequenceNumber);\n return PersistencePromise.resolve();\n }\n\n updateLimboDocument(\n txn: PersistenceTransaction,\n key: DocumentKey\n ): PersistencePromise<void> {\n this.orphanedSequenceNumbers.set(key, txn.currentSequenceNumber);\n return PersistencePromise.resolve();\n }\n\n documentSize(maybeDoc: MaybeDocument): number {\n let documentSize = maybeDoc.key.toString().length;\n if (maybeDoc instanceof Document) {\n documentSize += estimateByteSize(maybeDoc.toProto());\n }\n return documentSize;\n }\n\n private isPinned(\n txn: PersistenceTransaction,\n key: DocumentKey,\n upperBound: ListenSequenceNumber\n ): PersistencePromise<boolean> {\n return PersistencePromise.or([\n () => this.persistence.mutationQueuesContainKey(txn, key),\n () => this.persistence.getTargetCache().containsKey(txn, key),\n () => {\n const orphanedAt = this.orphanedSequenceNumbers.get(key);\n return PersistencePromise.resolve(\n orphanedAt !== undefined && orphanedAt > upperBound\n );\n }\n ]);\n }\n\n getCacheSize(txn: PersistenceTransaction): PersistencePromise<number> {\n return this.persistence.getRemoteDocumentCache().getSize(txn);\n }\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n ClientId,\n MemorySharedClientState,\n SharedClientState,\n WebStorageSharedClientState\n} from '../local/shared_client_state';\nimport { LocalStore, MultiTabLocalStore } from '../local/local_store';\nimport { MultiTabSyncEngine, SyncEngine } from './sync_engine';\nimport { RemoteStore } from '../remote/remote_store';\nimport { EventManager } from './event_manager';\nimport { AsyncQueue } from '../util/async_queue';\nimport { DatabaseInfo } from './database_info';\nimport { Platform } from '../platform/platform';\nimport { Datastore } from '../remote/datastore';\nimport { User } from '../auth/user';\nimport { PersistenceSettings } from './firestore_client';\nimport { debugAssert } from '../util/assert';\nimport { GarbageCollectionScheduler, Persistence } from '../local/persistence';\nimport { Code, FirestoreError } from '../util/error';\nimport { OnlineStateSource } from './types';\nimport { LruParams, 
LruScheduler } from '../local/lru_garbage_collector';\nimport { IndexFreeQueryEngine } from '../local/index_free_query_engine';\nimport { IndexedDbPersistence } from '../local/indexeddb_persistence';\nimport {\n MemoryEagerDelegate,\n MemoryPersistence\n} from '../local/memory_persistence';\n\nconst MEMORY_ONLY_PERSISTENCE_ERROR_MESSAGE =\n 'You are using the memory-only build of Firestore. Persistence support is ' +\n 'only available via the @firebase/firestore bundle or the ' +\n 'firebase-firestore.js build.';\n\nexport interface ComponentConfiguration {\n asyncQueue: AsyncQueue;\n databaseInfo: DatabaseInfo;\n platform: Platform;\n datastore: Datastore;\n clientId: ClientId;\n initialUser: User;\n maxConcurrentLimboResolutions: number;\n persistenceSettings: PersistenceSettings;\n}\n\n/**\n * Initializes and wires up all core components for Firestore. Implementations\n * override `initialize()` to provide all components.\n */\nexport interface ComponentProvider {\n persistence: Persistence;\n sharedClientState: SharedClientState;\n localStore: LocalStore;\n syncEngine: SyncEngine;\n gcScheduler: GarbageCollectionScheduler | null;\n remoteStore: RemoteStore;\n eventManager: EventManager;\n\n initialize(cfg: ComponentConfiguration): Promise<void>;\n\n clearPersistence(databaseId: DatabaseInfo): Promise<void>;\n}\n\n/**\n * Provides all components needed for Firestore with in-memory persistence.\n * Uses EagerGC garbage collection.\n */\nexport class MemoryComponentProvider implements ComponentProvider {\n persistence!: Persistence;\n sharedClientState!: SharedClientState;\n localStore!: LocalStore;\n syncEngine!: SyncEngine;\n gcScheduler!: GarbageCollectionScheduler | null;\n remoteStore!: RemoteStore;\n eventManager!: EventManager;\n\n async initialize(cfg: ComponentConfiguration): Promise<void> {\n this.sharedClientState = this.createSharedClientState(cfg);\n this.persistence = this.createPersistence(cfg);\n await this.persistence.start();\n this.gcScheduler = this.createGarbageCollectionScheduler(cfg);\n this.localStore = this.createLocalStore(cfg);\n this.remoteStore = this.createRemoteStore(cfg);\n this.syncEngine = this.createSyncEngine(cfg);\n this.eventManager = this.createEventManager(cfg);\n\n this.sharedClientState.onlineStateHandler = onlineState =>\n this.syncEngine.applyOnlineStateChange(\n onlineState,\n OnlineStateSource.SharedClientState\n );\n this.remoteStore.syncEngine = this.syncEngine;\n\n await this.localStore.start();\n await this.sharedClientState.start();\n await this.remoteStore.start();\n\n await this.remoteStore.applyPrimaryState(this.syncEngine.isPrimaryClient);\n }\n\n createEventManager(cfg: ComponentConfiguration): EventManager {\n return new EventManager(this.syncEngine);\n }\n\n createGarbageCollectionScheduler(\n cfg: ComponentConfiguration\n ): GarbageCollectionScheduler | null {\n return null;\n }\n\n createLocalStore(cfg: ComponentConfiguration): LocalStore {\n return new LocalStore(\n this.persistence,\n new IndexFreeQueryEngine(),\n cfg.initialUser\n );\n }\n\n createPersistence(cfg: ComponentConfiguration): Persistence {\n debugAssert(\n !cfg.persistenceSettings.durable,\n 'Can only start memory persistence'\n );\n return new MemoryPersistence(MemoryEagerDelegate.factory);\n }\n\n createRemoteStore(cfg: ComponentConfiguration): RemoteStore {\n return new RemoteStore(\n this.localStore,\n cfg.datastore,\n cfg.asyncQueue,\n onlineState =>\n this.syncEngine.applyOnlineStateChange(\n onlineState,\n OnlineStateSource.RemoteStore\n ),\n 
cfg.platform.newConnectivityMonitor()\n );\n }\n\n createSharedClientState(cfg: ComponentConfiguration): SharedClientState {\n return new MemorySharedClientState();\n }\n\n createSyncEngine(cfg: ComponentConfiguration): SyncEngine {\n return new SyncEngine(\n this.localStore,\n this.remoteStore,\n this.sharedClientState,\n cfg.initialUser,\n cfg.maxConcurrentLimboResolutions\n );\n }\n\n clearPersistence(databaseInfo: DatabaseInfo): Promise<void> {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n MEMORY_ONLY_PERSISTENCE_ERROR_MESSAGE\n );\n }\n}\n\n/**\n * Provides all components needed for Firestore with IndexedDB persistence.\n */\nexport class IndexedDbComponentProvider extends MemoryComponentProvider {\n persistence!: IndexedDbPersistence;\n\n // TODO(tree-shaking): Create an IndexedDbComponentProvider and a\n // MultiTabComponentProvider. The IndexedDbComponentProvider should depend\n // on LocalStore and SyncEngine.\n localStore!: MultiTabLocalStore;\n syncEngine!: MultiTabSyncEngine;\n\n async initialize(cfg: ComponentConfiguration): Promise<void> {\n await super.initialize(cfg);\n\n // NOTE: This will immediately call the listener, so we make sure to\n // set it after localStore / remoteStore are started.\n await this.persistence.setPrimaryStateListener(async isPrimary => {\n await (this.syncEngine as MultiTabSyncEngine).applyPrimaryState(\n isPrimary\n );\n if (this.gcScheduler) {\n if (isPrimary && !this.gcScheduler.started) {\n this.gcScheduler.start(this.localStore);\n } else if (!isPrimary) {\n this.gcScheduler.stop();\n }\n }\n });\n }\n\n createLocalStore(cfg: ComponentConfiguration): LocalStore {\n return new MultiTabLocalStore(\n this.persistence,\n new IndexFreeQueryEngine(),\n cfg.initialUser\n );\n }\n\n createSyncEngine(cfg: ComponentConfiguration): SyncEngine {\n const syncEngine = new MultiTabSyncEngine(\n this.localStore,\n this.remoteStore,\n this.sharedClientState,\n cfg.initialUser,\n cfg.maxConcurrentLimboResolutions\n );\n if (this.sharedClientState instanceof WebStorageSharedClientState) {\n this.sharedClientState.syncEngine = syncEngine;\n }\n return syncEngine;\n }\n\n createGarbageCollectionScheduler(\n cfg: ComponentConfiguration\n ): GarbageCollectionScheduler | null {\n const garbageCollector = this.persistence.referenceDelegate\n .garbageCollector;\n return new LruScheduler(garbageCollector, cfg.asyncQueue);\n }\n\n createPersistence(cfg: ComponentConfiguration): Persistence {\n debugAssert(\n cfg.persistenceSettings.durable,\n 'Can only start durable persistence'\n );\n\n const persistenceKey = IndexedDbPersistence.buildStoragePrefix(\n cfg.databaseInfo\n );\n const serializer = cfg.platform.newSerializer(cfg.databaseInfo.databaseId);\n return new IndexedDbPersistence(\n cfg.persistenceSettings.synchronizeTabs,\n persistenceKey,\n cfg.clientId,\n cfg.platform,\n LruParams.withCacheSize(cfg.persistenceSettings.cacheSizeBytes),\n cfg.asyncQueue,\n serializer,\n this.sharedClientState\n );\n }\n\n createSharedClientState(cfg: ComponentConfiguration): SharedClientState {\n if (\n cfg.persistenceSettings.durable &&\n cfg.persistenceSettings.synchronizeTabs\n ) {\n if (!WebStorageSharedClientState.isAvailable(cfg.platform)) {\n throw new FirestoreError(\n Code.UNIMPLEMENTED,\n 'IndexedDB persistence is only available on platforms that support LocalStorage.'\n );\n }\n const persistenceKey = IndexedDbPersistence.buildStoragePrefix(\n cfg.databaseInfo\n );\n return new WebStorageSharedClientState(\n cfg.asyncQueue,\n cfg.platform,\n persistenceKey,\n 
cfg.clientId,\n cfg.initialUser\n );\n }\n return new MemorySharedClientState();\n }\n\n clearPersistence(databaseInfo: DatabaseInfo): Promise<void> {\n const persistenceKey = IndexedDbPersistence.buildStoragePrefix(\n databaseInfo\n );\n return IndexedDbPersistence.clearPersistence(persistenceKey);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { CredentialsProvider } from '../api/credentials';\nimport { User } from '../auth/user';\nimport { LocalStore } from '../local/local_store';\nimport { GarbageCollectionScheduler, Persistence } from '../local/persistence';\nimport { Document, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { Mutation } from '../model/mutation';\nimport { Platform } from '../platform/platform';\nimport { newDatastore } from '../remote/datastore';\nimport { RemoteStore } from '../remote/remote_store';\nimport { AsyncQueue, wrapInUserErrorIfRecoverable } from '../util/async_queue';\nimport { Code, FirestoreError } from '../util/error';\nimport { logDebug } from '../util/log';\nimport { Deferred } from '../util/promise';\nimport {\n EventManager,\n ListenOptions,\n Observer,\n QueryListener\n} from './event_manager';\nimport { SyncEngine } from './sync_engine';\nimport { View } from './view';\n\nimport { SharedClientState } from '../local/shared_client_state';\nimport { AutoId } from '../util/misc';\nimport { DatabaseId, DatabaseInfo } from './database_info';\nimport { Query } from './query';\nimport { Transaction } from './transaction';\nimport { ViewSnapshot } from './view_snapshot';\nimport {\n ComponentProvider,\n MemoryComponentProvider\n} from './component_provider';\n\nconst LOG_TAG = 'FirestoreClient';\nconst MAX_CONCURRENT_LIMBO_RESOLUTIONS = 100;\n\n/** DOMException error code constants. */\nconst DOM_EXCEPTION_INVALID_STATE = 11;\nconst DOM_EXCEPTION_ABORTED = 20;\nconst DOM_EXCEPTION_QUOTA_EXCEEDED = 22;\n\nexport type PersistenceSettings =\n | {\n readonly durable: false;\n }\n | {\n readonly durable: true;\n readonly cacheSizeBytes: number;\n readonly synchronizeTabs: boolean;\n };\n\n/**\n * FirestoreClient is a top-level class that constructs and owns all of the\n * pieces of the client SDK architecture. It is responsible for creating the\n * async queue that is shared by all of the other components in the system.\n */\nexport class FirestoreClient {\n // NOTE: These should technically have '|undefined' in the types, since\n // they're initialized asynchronously rather than in the constructor, but\n // given that all work is done on the async queue and we assert that\n // initialization completes before any other work is queued, we're cheating\n // with the types rather than littering the code with '!' 
or unnecessary\n // undefined checks.\n private eventMgr!: EventManager;\n private persistence!: Persistence;\n private localStore!: LocalStore;\n private remoteStore!: RemoteStore;\n private syncEngine!: SyncEngine;\n private gcScheduler!: GarbageCollectionScheduler | null;\n\n // PORTING NOTE: SharedClientState is only used for multi-tab web.\n private sharedClientState!: SharedClientState;\n\n private readonly clientId = AutoId.newId();\n\n constructor(\n private platform: Platform,\n private databaseInfo: DatabaseInfo,\n private credentials: CredentialsProvider,\n /**\n * Asynchronous queue responsible for all of our internal processing. When\n * we get incoming work from the user (via public API) or the network\n * (incoming GRPC messages), we should always schedule onto this queue.\n * This ensures all of our work is properly serialized (e.g. we don't\n * start processing a new operation while the previous one is waiting for\n * an async I/O to complete).\n */\n private asyncQueue: AsyncQueue\n ) {}\n\n /**\n * Starts up the FirestoreClient, returning only whether or not enabling\n * persistence succeeded.\n *\n * The intent here is to \"do the right thing\" as far as users are concerned.\n * Namely, in cases where offline persistence is requested and possible,\n * enable it, but otherwise fall back to persistence disabled. For the most\n * part we expect this to succeed one way or the other so we don't expect our\n * users to actually wait on the firestore.enablePersistence Promise since\n * they generally won't care.\n *\n * Of course some users actually do care about whether or not persistence\n * was successfully enabled, so the Promise returned from this method\n * indicates this outcome.\n *\n * This presents a problem though: even before enablePersistence resolves or\n * rejects, users may have made calls to e.g. firestore.collection() which\n * means that the FirestoreClient in there will be available and will be\n * enqueuing actions on the async queue.\n *\n * Meanwhile any failure of an operation on the async queue causes it to\n * panic and reject any further work, on the premise that unhandled errors\n * are fatal.\n *\n * Consequently the fallback is handled internally here in start, and if the\n * fallback succeeds we signal success to the async queue even though the\n * start() itself signals failure.\n *\n * @param componentProvider Provider that returns all core components.\n * @param persistenceSettings Settings object to configure offline\n * persistence.\n * @returns A deferred result indicating the user-visible result of enabling\n * offline persistence. This method will reject this if IndexedDB fails to\n * start for any reason. If usePersistence is false this is\n * unconditionally resolved.\n */\n start(\n componentProvider: ComponentProvider,\n persistenceSettings: PersistenceSettings\n ): Promise<void> {\n this.verifyNotTerminated();\n // We defer our initialization until we get the current user from\n // setChangeListener(). We block the async queue until we got the initial\n // user and the initialization is completed. 
This will prevent any scheduled\n // work from happening before initialization is completed.\n //\n // If initializationDone resolved then the FirestoreClient is in a usable\n // state.\n const initializationDone = new Deferred<void>();\n\n // If usePersistence is true, certain classes of errors while starting are\n // recoverable but only by falling back to persistence disabled.\n //\n // If there's an error in the first case but not in recovery we cannot\n // reject the promise blocking the async queue because this will cause the\n // async queue to panic.\n const persistenceResult = new Deferred<void>();\n\n let initialized = false;\n this.credentials.setChangeListener(user => {\n if (!initialized) {\n initialized = true;\n\n logDebug(LOG_TAG, 'Initializing. user=', user.uid);\n\n return this.initializeComponents(\n componentProvider,\n persistenceSettings,\n user,\n persistenceResult\n ).then(initializationDone.resolve, initializationDone.reject);\n } else {\n this.asyncQueue.enqueueRetryable(() => {\n return this.handleCredentialChange(user);\n });\n }\n });\n\n // Block the async queue until initialization is done\n this.asyncQueue.enqueueAndForget(() => {\n return initializationDone.promise;\n });\n\n // Return only the result of enabling persistence. Note that this does not\n // need to await the completion of initializationDone because the result of\n // this method should not reflect any other kind of failure to start.\n return persistenceResult.promise;\n }\n\n /** Enables the network connection and requeues all pending operations. */\n enableNetwork(): Promise<void> {\n this.verifyNotTerminated();\n return this.asyncQueue.enqueue(() => {\n return this.syncEngine.enableNetwork();\n });\n }\n\n /**\n * Initializes persistent storage, attempting to use IndexedDB if\n * usePersistence is true or memory-only if false.\n *\n * If IndexedDB fails because it's already open in another tab or because the\n * platform can't possibly support our implementation then this method rejects\n * the persistenceResult and falls back on memory-only persistence.\n *\n * @param componentProvider The provider that provides all core componennts\n * for IndexedDB or memory-backed persistence\n * @param persistenceSettings Settings object to configure offline persistence\n * @param user The initial user\n * @param persistenceResult A deferred result indicating the user-visible\n * result of enabling offline persistence. This method will reject this if\n * IndexedDB fails to start for any reason. If usePersistence is false\n * this is unconditionally resolved.\n * @returns a Promise indicating whether or not initialization should\n * continue, i.e. 
that one of the persistence implementations actually\n * succeeded.\n */\n private async initializeComponents(\n componentProvider: ComponentProvider,\n persistenceSettings: PersistenceSettings,\n user: User,\n persistenceResult: Deferred<void>\n ): Promise<void> {\n try {\n // TODO(mrschmidt): Ideally, ComponentProvider would also initialize\n // Datastore (without duplicating the initializing logic once per\n // provider).\n\n const connection = await this.platform.loadConnection(this.databaseInfo);\n const serializer = this.platform.newSerializer(\n this.databaseInfo.databaseId\n );\n const datastore = newDatastore(connection, this.credentials, serializer);\n\n await componentProvider.initialize({\n asyncQueue: this.asyncQueue,\n databaseInfo: this.databaseInfo,\n platform: this.platform,\n datastore,\n clientId: this.clientId,\n initialUser: user,\n maxConcurrentLimboResolutions: MAX_CONCURRENT_LIMBO_RESOLUTIONS,\n persistenceSettings\n });\n\n this.persistence = componentProvider.persistence;\n this.sharedClientState = componentProvider.sharedClientState;\n this.localStore = componentProvider.localStore;\n this.remoteStore = componentProvider.remoteStore;\n this.syncEngine = componentProvider.syncEngine;\n this.gcScheduler = componentProvider.gcScheduler;\n this.eventMgr = componentProvider.eventManager;\n\n // When a user calls clearPersistence() in one client, all other clients\n // need to be terminated to allow the delete to succeed.\n this.persistence.setDatabaseDeletedListener(async () => {\n await this.terminate();\n });\n\n persistenceResult.resolve();\n } catch (error) {\n // Regardless of whether or not the retry succeeds, from an user\n // perspective, offline persistence has failed.\n persistenceResult.reject(error);\n\n // An unknown failure on the first stage shuts everything down.\n if (!this.canFallback(error)) {\n throw error;\n }\n console.warn(\n 'Error enabling offline persistence. Falling back to' +\n ' persistence disabled: ' +\n error\n );\n return this.initializeComponents(\n new MemoryComponentProvider(),\n { durable: false },\n user,\n persistenceResult\n );\n }\n }\n\n /**\n * Decides whether the provided error allows us to gracefully disable\n * persistence (as opposed to crashing the client).\n */\n private canFallback(error: FirestoreError | DOMException): boolean {\n if (error.name === 'FirebaseError') {\n return (\n error.code === Code.FAILED_PRECONDITION ||\n error.code === Code.UNIMPLEMENTED\n );\n } else if (\n typeof DOMException !== 'undefined' &&\n error instanceof DOMException\n ) {\n // There are a few known circumstances where we can open IndexedDb but\n // trying to read/write will fail (e.g. quota exceeded). For\n // well-understood cases, we attempt to detect these and then gracefully\n // fall back to memory persistence.\n // NOTE: Rather than continue to add to this list, we could decide to\n // always fall back, with the risk that we might accidentally hide errors\n // representing actual SDK bugs.\n return (\n // When the browser is out of quota we could get either quota exceeded\n // or an aborted error depending on whether the error happened during\n // schema migration.\n error.code === DOM_EXCEPTION_QUOTA_EXCEEDED ||\n error.code === DOM_EXCEPTION_ABORTED ||\n // Firefox Private Browsing mode disables IndexedDb and returns\n // INVALID_STATE for any usage.\n error.code === DOM_EXCEPTION_INVALID_STATE\n );\n }\n\n return true;\n }\n\n /**\n * Checks that the client has not been terminated. 
Ensures that other methods on\n * this class cannot be called after the client is terminated.\n */\n private verifyNotTerminated(): void {\n if (this.asyncQueue.isShuttingDown) {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'The client has already been terminated.'\n );\n }\n }\n\n private handleCredentialChange(user: User): Promise<void> {\n this.asyncQueue.verifyOperationInProgress();\n\n logDebug(LOG_TAG, 'Credential Changed. Current user: ' + user.uid);\n return this.syncEngine.handleCredentialChange(user);\n }\n\n /** Disables the network connection. Pending operations will not complete. */\n disableNetwork(): Promise<void> {\n this.verifyNotTerminated();\n return this.asyncQueue.enqueue(() => {\n return this.syncEngine.disableNetwork();\n });\n }\n\n terminate(): Promise<void> {\n return this.asyncQueue.enqueueAndInitiateShutdown(async () => {\n // PORTING NOTE: LocalStore does not need an explicit shutdown on web.\n if (this.gcScheduler) {\n this.gcScheduler.stop();\n }\n\n await this.remoteStore.shutdown();\n await this.sharedClientState.shutdown();\n await this.persistence.shutdown();\n\n // `removeChangeListener` must be called after shutting down the\n // RemoteStore as it will prevent the RemoteStore from retrieving\n // auth tokens.\n this.credentials.removeChangeListener();\n });\n }\n\n /**\n * Returns a Promise that resolves when all writes that were pending at the time this\n * method was called received server acknowledgement. An acknowledgement can be either acceptance\n * or rejection.\n */\n waitForPendingWrites(): Promise<void> {\n this.verifyNotTerminated();\n\n const deferred = new Deferred<void>();\n this.asyncQueue.enqueueAndForget(() => {\n return this.syncEngine.registerPendingWritesCallback(deferred);\n });\n return deferred.promise;\n }\n\n listen(\n query: Query,\n observer: Observer<ViewSnapshot>,\n options: ListenOptions\n ): QueryListener {\n this.verifyNotTerminated();\n const listener = new QueryListener(query, observer, options);\n this.asyncQueue.enqueueAndForget(() => this.eventMgr.listen(listener));\n return listener;\n }\n\n unlisten(listener: QueryListener): void {\n // Checks for termination but does not raise error, allowing unlisten after\n // termination to be a no-op.\n if (this.clientTerminated) {\n return;\n }\n this.asyncQueue.enqueueAndForget(() => {\n return this.eventMgr.unlisten(listener);\n });\n }\n\n async getDocumentFromLocalCache(\n docKey: DocumentKey\n ): Promise<Document | null> {\n this.verifyNotTerminated();\n const deferred = new Deferred<Document | null>();\n await this.asyncQueue.enqueue(async () => {\n try {\n const maybeDoc = await this.localStore.readDocument(docKey);\n if (maybeDoc instanceof Document) {\n deferred.resolve(maybeDoc);\n } else if (maybeDoc instanceof NoDocument) {\n deferred.resolve(null);\n } else {\n deferred.reject(\n new FirestoreError(\n Code.UNAVAILABLE,\n 'Failed to get document from cache. (However, this document may ' +\n \"exist on the server. 
Run again without setting 'source' in \" +\n 'the GetOptions to attempt to retrieve the document from the ' +\n 'server.)'\n )\n );\n }\n } catch (e) {\n const firestoreError = wrapInUserErrorIfRecoverable(\n e,\n `Failed to get document '${docKey} from cache`\n );\n deferred.reject(firestoreError);\n }\n });\n\n return deferred.promise;\n }\n\n async getDocumentsFromLocalCache(query: Query): Promise<ViewSnapshot> {\n this.verifyNotTerminated();\n const deferred = new Deferred<ViewSnapshot>();\n await this.asyncQueue.enqueue(async () => {\n try {\n const queryResult = await this.localStore.executeQuery(\n query,\n /* usePreviousResults= */ true\n );\n const view = new View(query, queryResult.remoteKeys);\n const viewDocChanges = view.computeDocChanges(queryResult.documents);\n const viewChange = view.applyChanges(\n viewDocChanges,\n /* updateLimboDocuments= */ false\n );\n deferred.resolve(viewChange.snapshot!);\n } catch (e) {\n const firestoreError = wrapInUserErrorIfRecoverable(\n e,\n `Failed to execute query '${query} against cache`\n );\n deferred.reject(firestoreError);\n }\n });\n return deferred.promise;\n }\n\n write(mutations: Mutation[]): Promise<void> {\n this.verifyNotTerminated();\n const deferred = new Deferred<void>();\n this.asyncQueue.enqueueAndForget(() =>\n this.syncEngine.write(mutations, deferred)\n );\n return deferred.promise;\n }\n\n databaseId(): DatabaseId {\n return this.databaseInfo.databaseId;\n }\n\n addSnapshotsInSyncListener(observer: Observer<void>): void {\n this.verifyNotTerminated();\n this.asyncQueue.enqueueAndForget(() => {\n this.eventMgr.addSnapshotsInSyncListener(observer);\n return Promise.resolve();\n });\n }\n\n removeSnapshotsInSyncListener(observer: Observer<void>): void {\n // Checks for shutdown but does not raise error, allowing remove after\n // shutdown to be a no-op.\n if (this.clientTerminated) {\n return;\n }\n this.asyncQueue.enqueueAndForget(() => {\n this.eventMgr.removeSnapshotsInSyncListener(observer);\n return Promise.resolve();\n });\n }\n\n get clientTerminated(): boolean {\n // Technically, the asyncQueue is still running, but only accepting operations\n // related to termination or supposed to be run after termination. It is effectively\n // terminated to the eyes of users.\n return this.asyncQueue.isShuttingDown;\n }\n\n transaction<T>(\n updateFunction: (transaction: Transaction) => Promise<T>\n ): Promise<T> {\n this.verifyNotTerminated();\n const deferred = new Deferred<T>();\n this.asyncQueue.enqueueAndForget(() => {\n this.syncEngine.runTransaction(this.asyncQueue, updateFunction, deferred);\n return Promise.resolve();\n });\n return deferred.promise;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Observer } from '../core/event_manager';\nimport { EventHandler } from './misc';\n\n/*\n * A wrapper implementation of Observer<T> that will dispatch events\n * asynchronously. 
To allow immediate silencing, a mute call is added which\n * causes events scheduled to no longer be raised.\n */\nexport class AsyncObserver<T> implements Observer<T> {\n /**\n * When set to true, will not raise future events. Necessary to deal with\n * async detachment of listener.\n */\n private muted = false;\n\n constructor(private observer: Observer<T>) {}\n\n next(value: T): void {\n this.scheduleEvent(this.observer.next, value);\n }\n\n error(error: Error): void {\n this.scheduleEvent(this.observer.error, error);\n }\n\n mute(): void {\n this.muted = true;\n }\n\n private scheduleEvent<E>(eventHandler: EventHandler<E>, event: E): void {\n if (!this.muted) {\n setTimeout(() => {\n if (!this.muted) {\n eventHandler(event);\n }\n }, 0);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { JsonObject } from '../model/object_value';\n\n/**\n * Observer/Subscribe interfaces.\n */\nexport type NextFn<T> = (value: T) => void;\nexport type ErrorFn = (error: Error) => void;\nexport type CompleteFn = () => void;\n\n// Allow for any of the Observer methods to be undefined.\nexport interface PartialObserver<T> {\n next?: NextFn<T>;\n error?: ErrorFn;\n complete?: CompleteFn;\n}\n\nexport interface Unsubscribe {\n (): void;\n}\n\nexport function isPartialObserver(obj: unknown): boolean {\n return implementsAnyMethods(obj, ['next', 'error', 'complete']);\n}\n\n/**\n * Returns true if obj is an object and contains at least one of the specified\n * methods.\n */\nfunction implementsAnyMethods(obj: unknown, methods: string[]): boolean {\n if (typeof obj !== 'object' || obj === null) {\n return false;\n }\n\n const object = obj as JsonObject<unknown>;\n for (const method of methods) {\n if (method in object && typeof object[method] === 'function') {\n return true;\n }\n }\n return false;\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as firestore from '@firebase/firestore-types';\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { DocumentReference, Firestore } from './database';\nimport { Blob } from './blob';\nimport { GeoPoint } from './geo_point';\nimport { Timestamp } from './timestamp';\nimport { DatabaseId } from '../core/database_info';\nimport { DocumentKey } from '../model/document_key';\nimport {\n normalizeByteString,\n normalizeNumber,\n normalizeTimestamp,\n typeOrder\n} from '../model/values';\nimport 
{\n getLocalWriteTime,\n getPreviousValue\n} from '../model/server_timestamps';\nimport { fail, hardAssert } from '../util/assert';\nimport { forEach } from '../util/obj';\nimport { TypeOrder } from '../model/object_value';\nimport { ResourcePath } from '../model/path';\nimport { isValidResourceName } from '../remote/serializer';\nimport { logError } from '../util/log';\n\nexport type ServerTimestampBehavior = 'estimate' | 'previous' | 'none';\n\n/**\n * Converts Firestore's internal types to the JavaScript types that we expose\n * to the user.\n */\nexport class UserDataWriter<T = firestore.DocumentData> {\n constructor(\n private readonly firestore: Firestore,\n private readonly timestampsInSnapshots: boolean,\n private readonly serverTimestampBehavior?: ServerTimestampBehavior,\n private readonly converter?: firestore.FirestoreDataConverter<T>\n ) {}\n\n convertValue(value: api.Value): unknown {\n switch (typeOrder(value)) {\n case TypeOrder.NullValue:\n return null;\n case TypeOrder.BooleanValue:\n return value.booleanValue!;\n case TypeOrder.NumberValue:\n return normalizeNumber(value.integerValue || value.doubleValue);\n case TypeOrder.TimestampValue:\n return this.convertTimestamp(value.timestampValue!);\n case TypeOrder.ServerTimestampValue:\n return this.convertServerTimestamp(value);\n case TypeOrder.StringValue:\n return value.stringValue!;\n case TypeOrder.BlobValue:\n return new Blob(normalizeByteString(value.bytesValue!));\n case TypeOrder.RefValue:\n return this.convertReference(value.referenceValue!);\n case TypeOrder.GeoPointValue:\n return this.convertGeoPoint(value.geoPointValue!);\n case TypeOrder.ArrayValue:\n return this.convertArray(value.arrayValue!);\n case TypeOrder.ObjectValue:\n return this.convertObject(value.mapValue!);\n default:\n throw fail('Invalid value type: ' + JSON.stringify(value));\n }\n }\n\n private convertObject(mapValue: api.MapValue): firestore.DocumentData {\n const result: firestore.DocumentData = {};\n forEach(mapValue.fields || {}, (key, value) => {\n result[key] = this.convertValue(value);\n });\n return result;\n }\n\n private convertGeoPoint(value: api.LatLng): GeoPoint {\n return new GeoPoint(\n normalizeNumber(value.latitude),\n normalizeNumber(value.longitude)\n );\n }\n\n private convertArray(arrayValue: api.ArrayValue): unknown[] {\n return (arrayValue.values || []).map(value => this.convertValue(value));\n }\n\n private convertServerTimestamp(value: api.Value): unknown {\n switch (this.serverTimestampBehavior) {\n case 'previous':\n const previousValue = getPreviousValue(value);\n if (previousValue == null) {\n return null;\n }\n return this.convertValue(previousValue);\n case 'estimate':\n return this.convertTimestamp(getLocalWriteTime(value));\n default:\n return null;\n }\n }\n\n private convertTimestamp(value: api.Timestamp): Timestamp | Date {\n const normalizedValue = normalizeTimestamp(value);\n const timestamp = new Timestamp(\n normalizedValue.seconds,\n normalizedValue.nanos\n );\n if (this.timestampsInSnapshots) {\n return timestamp;\n } else {\n return timestamp.toDate();\n }\n }\n\n private convertReference(name: string): DocumentReference<T> {\n const resourcePath = ResourcePath.fromString(name);\n hardAssert(\n isValidResourceName(resourcePath),\n 'ReferenceValue is not valid ' + name\n );\n const databaseId = new DatabaseId(resourcePath.get(1), resourcePath.get(3));\n const key = new DocumentKey(resourcePath.popFirst(5));\n\n if (!databaseId.isEqual(this.firestore._databaseId)) {\n // TODO(b/64130202): Somehow 
support foreign references.\n logError(\n `Document ${key} contains a document ` +\n `reference within a different database (` +\n `${databaseId.projectId}/${databaseId.database}) which is not ` +\n `supported. It will be treated as a reference in the current ` +\n `database (${this.firestore._databaseId.projectId}/${this.firestore._databaseId.database}) ` +\n `instead.`\n );\n }\n\n return new DocumentReference(key, this.firestore, this.converter);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport * as firestore from '@firebase/firestore-types';\n\nimport * as api from '../protos/firestore_proto_api';\n\nimport { FirebaseApp } from '@firebase/app-types';\nimport { _FirebaseApp, FirebaseService } from '@firebase/app-types/private';\nimport { DatabaseId, DatabaseInfo } from '../core/database_info';\nimport { ListenOptions } from '../core/event_manager';\nimport {\n ComponentProvider,\n MemoryComponentProvider\n} from '../core/component_provider';\nimport { FirestoreClient, PersistenceSettings } from '../core/firestore_client';\nimport {\n Bound,\n Direction,\n FieldFilter,\n Filter,\n Operator,\n OrderBy,\n Query as InternalQuery\n} from '../core/query';\nimport { Transaction as InternalTransaction } from '../core/transaction';\nimport { ChangeType, ViewSnapshot } from '../core/view_snapshot';\nimport { LruParams } from '../local/lru_garbage_collector';\nimport { Document, MaybeDocument, NoDocument } from '../model/document';\nimport { DocumentKey } from '../model/document_key';\nimport { DeleteMutation, Mutation, Precondition } from '../model/mutation';\nimport { FieldPath, ResourcePath } from '../model/path';\nimport { isServerTimestamp } from '../model/server_timestamps';\nimport { refValue } from '../model/values';\nimport { PlatformSupport } from '../platform/platform';\nimport { debugAssert, fail } from '../util/assert';\nimport { AsyncObserver } from '../util/async_observer';\nimport { AsyncQueue } from '../util/async_queue';\nimport { Code, FirestoreError } from '../util/error';\nimport {\n invalidClassError,\n validateArgType,\n validateAtLeastNumberOfArgs,\n validateBetweenNumberOfArgs,\n validateDefined,\n validateExactNumberOfArgs,\n validateNamedOptionalPropertyEquals,\n validateNamedOptionalType,\n validateNamedType,\n validateOptionalArgType,\n validateOptionalArrayElements,\n validateOptionNames,\n validatePositiveNumber,\n validateStringEnum,\n valueDescription\n} from '../util/input_validation';\nimport { getLogLevel, logError, LogLevel, setLogLevel } from '../util/log';\nimport { AutoId } from '../util/misc';\nimport { Deferred, Rejecter, Resolver } from '../util/promise';\nimport { FieldPath as ExternalFieldPath } from './field_path';\n\nimport {\n CredentialsProvider,\n CredentialsSettings,\n EmptyCredentialsProvider,\n FirebaseCredentialsProvider,\n makeCredentialsProvider\n} from './credentials';\nimport {\n CompleteFn,\n ErrorFn,\n isPartialObserver,\n NextFn,\n PartialObserver,\n 
Unsubscribe\n} from './observer';\nimport { fieldPathFromArgument, UserDataReader } from './user_data_reader';\nimport { UserDataWriter } from './user_data_writer';\nimport { FirebaseAuthInternalName } from '@firebase/auth-interop-types';\nimport { Provider } from '@firebase/component';\n\n// settings() defaults:\nconst DEFAULT_HOST = 'firestore.googleapis.com';\nconst DEFAULT_SSL = true;\nconst DEFAULT_TIMESTAMPS_IN_SNAPSHOTS = true;\nconst DEFAULT_FORCE_LONG_POLLING = false;\nconst DEFAULT_IGNORE_UNDEFINED_PROPERTIES = false;\n\n/**\n * Constant used to indicate the LRU garbage collection should be disabled.\n * Set this value as the `cacheSizeBytes` on the settings passed to the\n * `Firestore` instance.\n */\nexport const CACHE_SIZE_UNLIMITED = LruParams.COLLECTION_DISABLED;\n\n// enablePersistence() defaults:\nconst DEFAULT_SYNCHRONIZE_TABS = false;\n\n/** Undocumented, private additional settings not exposed in our public API. */\ninterface PrivateSettings extends firestore.Settings {\n // Can be a google-auth-library or gapi client.\n credentials?: CredentialsSettings;\n}\n\n/**\n * Options that can be provided in the Firestore constructor when not using\n * Firebase (aka standalone mode).\n */\nexport interface FirestoreDatabase {\n projectId: string;\n database?: string;\n}\n\n/**\n * A concrete type describing all the values that can be applied via a\n * user-supplied firestore.Settings object. This is a separate type so that\n * defaults can be supplied and the value can be checked for equality.\n */\nclass FirestoreSettings {\n /** The hostname to connect to. */\n readonly host: string;\n\n /** Whether to use SSL when connecting. */\n readonly ssl: boolean;\n\n readonly timestampsInSnapshots: boolean;\n\n readonly cacheSizeBytes: number;\n\n readonly forceLongPolling: boolean;\n\n readonly ignoreUndefinedProperties: boolean;\n\n // Can be a google-auth-library or gapi client.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n credentials?: any;\n\n constructor(settings: PrivateSettings) {\n if (settings.host === undefined) {\n if (settings.ssl !== undefined) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n \"Can't provide ssl option if host option is not set\"\n );\n }\n this.host = DEFAULT_HOST;\n this.ssl = DEFAULT_SSL;\n } else {\n validateNamedType('settings', 'non-empty string', 'host', settings.host);\n this.host = settings.host;\n\n validateNamedOptionalType('settings', 'boolean', 'ssl', settings.ssl);\n this.ssl = settings.ssl ?? 
DEFAULT_SSL;\n }\n validateOptionNames('settings', settings, [\n 'host',\n 'ssl',\n 'credentials',\n 'timestampsInSnapshots',\n 'cacheSizeBytes',\n 'experimentalForceLongPolling',\n 'ignoreUndefinedProperties'\n ]);\n\n validateNamedOptionalType(\n 'settings',\n 'object',\n 'credentials',\n settings.credentials\n );\n this.credentials = settings.credentials;\n\n validateNamedOptionalType(\n 'settings',\n 'boolean',\n 'timestampsInSnapshots',\n settings.timestampsInSnapshots\n );\n\n validateNamedOptionalType(\n 'settings',\n 'boolean',\n 'ignoreUndefinedProperties',\n settings.ignoreUndefinedProperties\n );\n\n // Nobody should set timestampsInSnapshots anymore, but the error depends on\n // whether they set it to true or false...\n if (settings.timestampsInSnapshots === true) {\n logError(\n \"The setting 'timestampsInSnapshots: true' is no longer required \" +\n 'and should be removed.'\n );\n } else if (settings.timestampsInSnapshots === false) {\n logError(\n \"Support for 'timestampsInSnapshots: false' will be removed soon. \" +\n 'You must update your code to handle Timestamp objects.'\n );\n }\n this.timestampsInSnapshots =\n settings.timestampsInSnapshots ?? DEFAULT_TIMESTAMPS_IN_SNAPSHOTS;\n this.ignoreUndefinedProperties =\n settings.ignoreUndefinedProperties ?? DEFAULT_IGNORE_UNDEFINED_PROPERTIES;\n\n validateNamedOptionalType(\n 'settings',\n 'number',\n 'cacheSizeBytes',\n settings.cacheSizeBytes\n );\n if (settings.cacheSizeBytes === undefined) {\n this.cacheSizeBytes = LruParams.DEFAULT_CACHE_SIZE_BYTES;\n } else {\n if (\n settings.cacheSizeBytes !== CACHE_SIZE_UNLIMITED &&\n settings.cacheSizeBytes < LruParams.MINIMUM_CACHE_SIZE_BYTES\n ) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `cacheSizeBytes must be at least ${LruParams.MINIMUM_CACHE_SIZE_BYTES}`\n );\n } else {\n this.cacheSizeBytes = settings.cacheSizeBytes;\n }\n }\n\n validateNamedOptionalType(\n 'settings',\n 'boolean',\n 'experimentalForceLongPolling',\n settings.experimentalForceLongPolling\n );\n this.forceLongPolling =\n settings.experimentalForceLongPolling ?? DEFAULT_FORCE_LONG_POLLING;\n }\n\n isEqual(other: FirestoreSettings): boolean {\n return (\n this.host === other.host &&\n this.ssl === other.ssl &&\n this.timestampsInSnapshots === other.timestampsInSnapshots &&\n this.credentials === other.credentials &&\n this.cacheSizeBytes === other.cacheSizeBytes &&\n this.forceLongPolling === other.forceLongPolling &&\n this.ignoreUndefinedProperties === other.ignoreUndefinedProperties\n );\n }\n}\n\n/**\n * The root reference to the database.\n */\nexport class Firestore implements firestore.FirebaseFirestore, FirebaseService {\n // The objects that are a part of this API are exposed to third-parties as\n // compiled javascript so we want to flag our private members with a leading\n // underscore to discourage their use.\n readonly _databaseId: DatabaseId;\n private readonly _persistenceKey: string;\n private readonly _componentProvider: ComponentProvider;\n private _credentials: CredentialsProvider;\n private readonly _firebaseApp: FirebaseApp | null = null;\n private _settings: FirestoreSettings;\n\n // The firestore client instance. This will be available as soon as\n // configureClient is called, but any calls against it will block until\n // setup has completed.\n //\n // Operations on the _firestoreClient don't block on _firestoreReady. 
Those\n // are already set to synchronize on the async queue.\n private _firestoreClient: FirestoreClient | undefined;\n\n // Public for use in tests.\n // TODO(mikelehen): Use modularized initialization instead.\n readonly _queue = new AsyncQueue();\n\n _userDataReader: UserDataReader | undefined;\n\n // Note: We are using `MemoryComponentProvider` as a default\n // ComponentProvider to ensure backwards compatibility with the format\n // expected by the console build.\n constructor(\n databaseIdOrApp: FirestoreDatabase | FirebaseApp,\n authProvider: Provider<FirebaseAuthInternalName>,\n componentProvider: ComponentProvider = new MemoryComponentProvider()\n ) {\n if (typeof (databaseIdOrApp as FirebaseApp).options === 'object') {\n // This is very likely a Firebase app object\n // TODO(b/34177605): Can we somehow use instanceof?\n const app = databaseIdOrApp as FirebaseApp;\n this._firebaseApp = app;\n this._databaseId = Firestore.databaseIdFromApp(app);\n this._persistenceKey = app.name;\n this._credentials = new FirebaseCredentialsProvider(authProvider);\n } else {\n const external = databaseIdOrApp as FirestoreDatabase;\n if (!external.projectId) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Must provide projectId'\n );\n }\n\n this._databaseId = new DatabaseId(external.projectId, external.database);\n // Use a default persistenceKey that lines up with FirebaseApp.\n this._persistenceKey = '[DEFAULT]';\n this._credentials = new EmptyCredentialsProvider();\n }\n\n this._componentProvider = componentProvider;\n this._settings = new FirestoreSettings({});\n }\n\n get _dataReader(): UserDataReader {\n debugAssert(\n !!this._firestoreClient,\n 'Cannot obtain UserDataReader before instance is intitialized'\n );\n if (!this._userDataReader) {\n // Lazy initialize UserDataReader once the settings are frozen\n this._userDataReader = new UserDataReader(\n this._databaseId,\n this._settings.ignoreUndefinedProperties\n );\n }\n return this._userDataReader;\n }\n\n settings(settingsLiteral: firestore.Settings): void {\n validateExactNumberOfArgs('Firestore.settings', arguments, 1);\n validateArgType('Firestore.settings', 'object', 1, settingsLiteral);\n\n const newSettings = new FirestoreSettings(settingsLiteral);\n if (this._firestoreClient && !this._settings.isEqual(newSettings)) {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'Firestore has already been started and its settings can no longer ' +\n 'be changed. You can only call settings() before calling any other ' +\n 'methods on a Firestore object.'\n );\n }\n\n this._settings = newSettings;\n if (newSettings.credentials !== undefined) {\n this._credentials = makeCredentialsProvider(newSettings.credentials);\n }\n }\n\n enableNetwork(): Promise<void> {\n this.ensureClientConfigured();\n return this._firestoreClient!.enableNetwork();\n }\n\n disableNetwork(): Promise<void> {\n this.ensureClientConfigured();\n return this._firestoreClient!.disableNetwork();\n }\n\n enablePersistence(settings?: firestore.PersistenceSettings): Promise<void> {\n if (this._firestoreClient) {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'Firestore has already been started and persistence can no longer ' +\n 'be enabled. You can only call enablePersistence() before calling ' +\n 'any other methods on a Firestore object.'\n );\n }\n\n let synchronizeTabs = false;\n\n if (settings) {\n if (settings.experimentalTabSynchronization !== undefined) {\n logError(\n \"The 'experimentalTabSynchronization' setting will be removed. 
Use 'synchronizeTabs' instead.\"\n );\n }\n synchronizeTabs =\n settings.synchronizeTabs ??\n settings.experimentalTabSynchronization ??\n DEFAULT_SYNCHRONIZE_TABS;\n }\n\n return this.configureClient(this._componentProvider, {\n durable: true,\n cacheSizeBytes: this._settings.cacheSizeBytes,\n synchronizeTabs\n });\n }\n\n async clearPersistence(): Promise<void> {\n if (\n this._firestoreClient !== undefined &&\n !this._firestoreClient.clientTerminated\n ) {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'Persistence cannot be cleared after this Firestore instance is initialized.'\n );\n }\n\n const deferred = new Deferred<void>();\n this._queue.enqueueAndForgetEvenAfterShutdown(async () => {\n try {\n const databaseInfo = this.makeDatabaseInfo();\n await this._componentProvider.clearPersistence(databaseInfo);\n deferred.resolve();\n } catch (e) {\n deferred.reject(e);\n }\n });\n return deferred.promise;\n }\n\n terminate(): Promise<void> {\n (this.app as _FirebaseApp)._removeServiceInstance('firestore');\n return this.INTERNAL.delete();\n }\n\n get _isTerminated(): boolean {\n this.ensureClientConfigured();\n return this._firestoreClient!.clientTerminated;\n }\n\n waitForPendingWrites(): Promise<void> {\n this.ensureClientConfigured();\n return this._firestoreClient!.waitForPendingWrites();\n }\n\n onSnapshotsInSync(observer: PartialObserver<void>): Unsubscribe;\n onSnapshotsInSync(onSync: () => void): Unsubscribe;\n onSnapshotsInSync(arg: unknown): Unsubscribe {\n this.ensureClientConfigured();\n\n if (isPartialObserver(arg)) {\n return this.onSnapshotsInSyncInternal(arg as PartialObserver<void>);\n } else {\n validateArgType('Firestore.onSnapshotsInSync', 'function', 1, arg);\n const observer: PartialObserver<void> = {\n next: arg as () => void\n };\n return this.onSnapshotsInSyncInternal(observer);\n }\n }\n\n private onSnapshotsInSyncInternal(\n observer: PartialObserver<void>\n ): Unsubscribe {\n const errHandler = (err: Error): void => {\n throw fail('Uncaught Error in onSnapshotsInSync');\n };\n const asyncObserver = new AsyncObserver<void>({\n next: () => {\n if (observer.next) {\n observer.next();\n }\n },\n error: errHandler\n });\n this._firestoreClient!.addSnapshotsInSyncListener(asyncObserver);\n return () => {\n asyncObserver.mute();\n this._firestoreClient!.removeSnapshotsInSyncListener(asyncObserver);\n };\n }\n\n ensureClientConfigured(): FirestoreClient {\n if (!this._firestoreClient) {\n // Kick off starting the client but don't actually wait for it.\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.configureClient(new MemoryComponentProvider(), {\n durable: false\n });\n }\n return this._firestoreClient as FirestoreClient;\n }\n\n private makeDatabaseInfo(): DatabaseInfo {\n return new DatabaseInfo(\n this._databaseId,\n this._persistenceKey,\n this._settings.host,\n this._settings.ssl,\n this._settings.forceLongPolling\n );\n }\n\n private configureClient(\n componentProvider: ComponentProvider,\n persistenceSettings: PersistenceSettings\n ): Promise<void> {\n debugAssert(!!this._settings.host, 'FirestoreSettings.host is not set');\n\n debugAssert(\n !this._firestoreClient,\n 'configureClient() called multiple times'\n );\n\n const databaseInfo = this.makeDatabaseInfo();\n\n this._firestoreClient = new FirestoreClient(\n PlatformSupport.getPlatform(),\n databaseInfo,\n this._credentials,\n this._queue\n );\n\n return this._firestoreClient.start(componentProvider, persistenceSettings);\n }\n\n private static 
databaseIdFromApp(app: FirebaseApp): DatabaseId {\n if (!contains(app.options, 'projectId')) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n '\"projectId\" not provided in firebase.initializeApp.'\n );\n }\n\n const projectId = app.options.projectId;\n if (!projectId || typeof projectId !== 'string') {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'projectId must be a string in FirebaseApp.options'\n );\n }\n return new DatabaseId(projectId);\n }\n\n get app(): FirebaseApp {\n if (!this._firebaseApp) {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n \"Firestore was not initialized using the Firebase SDK. 'app' is \" +\n 'not available'\n );\n }\n return this._firebaseApp;\n }\n\n INTERNAL = {\n delete: async (): Promise<void> => {\n // The client must be initalized to ensure that all subsequent API usage\n // throws an exception.\n this.ensureClientConfigured();\n await this._firestoreClient!.terminate();\n }\n };\n\n collection(pathString: string): firestore.CollectionReference {\n validateExactNumberOfArgs('Firestore.collection', arguments, 1);\n validateArgType('Firestore.collection', 'non-empty string', 1, pathString);\n this.ensureClientConfigured();\n return new CollectionReference(ResourcePath.fromString(pathString), this);\n }\n\n doc(pathString: string): firestore.DocumentReference {\n validateExactNumberOfArgs('Firestore.doc', arguments, 1);\n validateArgType('Firestore.doc', 'non-empty string', 1, pathString);\n this.ensureClientConfigured();\n return DocumentReference.forPath(ResourcePath.fromString(pathString), this);\n }\n\n collectionGroup(collectionId: string): firestore.Query {\n validateExactNumberOfArgs('Firestore.collectionGroup', arguments, 1);\n validateArgType(\n 'Firestore.collectionGroup',\n 'non-empty string',\n 1,\n collectionId\n );\n if (collectionId.indexOf('/') >= 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid collection ID '${collectionId}' passed to function ` +\n `Firestore.collectionGroup(). 
Collection IDs must not contain '/'.`\n );\n }\n this.ensureClientConfigured();\n return new Query(\n new InternalQuery(ResourcePath.EMPTY_PATH, collectionId),\n this\n );\n }\n\n runTransaction<T>(\n updateFunction: (transaction: firestore.Transaction) => Promise<T>\n ): Promise<T> {\n validateExactNumberOfArgs('Firestore.runTransaction', arguments, 1);\n validateArgType('Firestore.runTransaction', 'function', 1, updateFunction);\n return this.ensureClientConfigured().transaction(\n (transaction: InternalTransaction) => {\n return updateFunction(new Transaction(this, transaction));\n }\n );\n }\n\n batch(): firestore.WriteBatch {\n this.ensureClientConfigured();\n\n return new WriteBatch(this);\n }\n\n static get logLevel(): firestore.LogLevel {\n switch (getLogLevel()) {\n case LogLevel.DEBUG:\n return 'debug';\n case LogLevel.SILENT:\n return 'silent';\n default:\n // The default log level is error\n return 'error';\n }\n }\n\n static setLogLevel(level: firestore.LogLevel): void {\n validateExactNumberOfArgs('Firestore.setLogLevel', arguments, 1);\n validateArgType('Firestore.setLogLevel', 'non-empty string', 1, level);\n switch (level) {\n case 'debug':\n setLogLevel(LogLevel.DEBUG);\n break;\n case 'error':\n setLogLevel(LogLevel.ERROR);\n break;\n case 'silent':\n setLogLevel(LogLevel.SILENT);\n break;\n default:\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid log level: ' + level\n );\n }\n }\n\n // Note: this is not a property because the minifier can't work correctly with\n // the way TypeScript compiler outputs properties.\n _areTimestampsInSnapshotsEnabled(): boolean {\n return this._settings.timestampsInSnapshots;\n }\n}\n\n/**\n * A reference to a transaction.\n */\nexport class Transaction implements firestore.Transaction {\n constructor(\n private _firestore: Firestore,\n private _transaction: InternalTransaction\n ) {}\n\n get<T>(\n documentRef: firestore.DocumentReference<T>\n ): Promise<firestore.DocumentSnapshot<T>> {\n validateExactNumberOfArgs('Transaction.get', arguments, 1);\n const ref = validateReference(\n 'Transaction.get',\n documentRef,\n this._firestore\n );\n return this._transaction\n .lookup([ref._key])\n .then((docs: MaybeDocument[]) => {\n if (!docs || docs.length !== 1) {\n return fail('Mismatch in docs returned from document lookup.');\n }\n const doc = docs[0];\n if (doc instanceof NoDocument) {\n return new DocumentSnapshot<T>(\n this._firestore,\n ref._key,\n null,\n /* fromCache= */ false,\n /* hasPendingWrites= */ false,\n ref._converter\n );\n } else if (doc instanceof Document) {\n return new DocumentSnapshot<T>(\n this._firestore,\n ref._key,\n doc,\n /* fromCache= */ false,\n /* hasPendingWrites= */ false,\n ref._converter\n );\n } else {\n throw fail(\n `BatchGetDocumentsRequest returned unexpected document type: ${doc.constructor.name}`\n );\n }\n });\n }\n\n set<T>(\n documentRef: firestore.DocumentReference<T>,\n value: T,\n options?: firestore.SetOptions\n ): Transaction {\n validateBetweenNumberOfArgs('Transaction.set', arguments, 2, 3);\n const ref = validateReference(\n 'Transaction.set',\n documentRef,\n this._firestore\n );\n options = validateSetOptions('Transaction.set', options);\n const [convertedValue, functionName] = applyFirestoreDataConverter(\n ref._converter,\n value,\n 'Transaction.set'\n );\n const parsed =\n options.merge || options.mergeFields\n ? 
this._firestore._dataReader.parseMergeData(\n functionName,\n convertedValue,\n options.mergeFields\n )\n : this._firestore._dataReader.parseSetData(\n functionName,\n convertedValue\n );\n this._transaction.set(ref._key, parsed);\n return this;\n }\n\n update(\n documentRef: firestore.DocumentReference<unknown>,\n value: firestore.UpdateData\n ): Transaction;\n update(\n documentRef: firestore.DocumentReference<unknown>,\n field: string | ExternalFieldPath,\n value: unknown,\n ...moreFieldsAndValues: unknown[]\n ): Transaction;\n update(\n documentRef: firestore.DocumentReference<unknown>,\n fieldOrUpdateData: string | ExternalFieldPath | firestore.UpdateData,\n value?: unknown,\n ...moreFieldsAndValues: unknown[]\n ): Transaction {\n let ref;\n let parsed;\n\n if (\n typeof fieldOrUpdateData === 'string' ||\n fieldOrUpdateData instanceof ExternalFieldPath\n ) {\n validateAtLeastNumberOfArgs('Transaction.update', arguments, 3);\n ref = validateReference(\n 'Transaction.update',\n documentRef,\n this._firestore\n );\n parsed = this._firestore._dataReader.parseUpdateVarargs(\n 'Transaction.update',\n fieldOrUpdateData,\n value,\n moreFieldsAndValues\n );\n } else {\n validateExactNumberOfArgs('Transaction.update', arguments, 2);\n ref = validateReference(\n 'Transaction.update',\n documentRef,\n this._firestore\n );\n parsed = this._firestore._dataReader.parseUpdateData(\n 'Transaction.update',\n fieldOrUpdateData\n );\n }\n\n this._transaction.update(ref._key, parsed);\n return this;\n }\n\n delete(documentRef: firestore.DocumentReference<unknown>): Transaction {\n validateExactNumberOfArgs('Transaction.delete', arguments, 1);\n const ref = validateReference(\n 'Transaction.delete',\n documentRef,\n this._firestore\n );\n this._transaction.delete(ref._key);\n return this;\n }\n}\n\nexport class WriteBatch implements firestore.WriteBatch {\n private _mutations = [] as Mutation[];\n private _committed = false;\n\n constructor(private _firestore: Firestore) {}\n\n set<T>(\n documentRef: firestore.DocumentReference<T>,\n value: T,\n options?: firestore.SetOptions\n ): WriteBatch {\n validateBetweenNumberOfArgs('WriteBatch.set', arguments, 2, 3);\n this.verifyNotCommitted();\n const ref = validateReference(\n 'WriteBatch.set',\n documentRef,\n this._firestore\n );\n options = validateSetOptions('WriteBatch.set', options);\n const [convertedValue, functionName] = applyFirestoreDataConverter(\n ref._converter,\n value,\n 'WriteBatch.set'\n );\n const parsed =\n options.merge || options.mergeFields\n ? 
this._firestore._dataReader.parseMergeData(\n functionName,\n convertedValue,\n options.mergeFields\n )\n : this._firestore._dataReader.parseSetData(\n functionName,\n convertedValue\n );\n this._mutations = this._mutations.concat(\n parsed.toMutations(ref._key, Precondition.none())\n );\n return this;\n }\n\n update(\n documentRef: firestore.DocumentReference<unknown>,\n value: firestore.UpdateData\n ): WriteBatch;\n update(\n documentRef: firestore.DocumentReference<unknown>,\n field: string | ExternalFieldPath,\n value: unknown,\n ...moreFieldsAndValues: unknown[]\n ): WriteBatch;\n update(\n documentRef: firestore.DocumentReference<unknown>,\n fieldOrUpdateData: string | ExternalFieldPath | firestore.UpdateData,\n value?: unknown,\n ...moreFieldsAndValues: unknown[]\n ): WriteBatch {\n this.verifyNotCommitted();\n\n let ref;\n let parsed;\n\n if (\n typeof fieldOrUpdateData === 'string' ||\n fieldOrUpdateData instanceof ExternalFieldPath\n ) {\n validateAtLeastNumberOfArgs('WriteBatch.update', arguments, 3);\n ref = validateReference(\n 'WriteBatch.update',\n documentRef,\n this._firestore\n );\n parsed = this._firestore._dataReader.parseUpdateVarargs(\n 'WriteBatch.update',\n fieldOrUpdateData,\n value,\n moreFieldsAndValues\n );\n } else {\n validateExactNumberOfArgs('WriteBatch.update', arguments, 2);\n ref = validateReference(\n 'WriteBatch.update',\n documentRef,\n this._firestore\n );\n parsed = this._firestore._dataReader.parseUpdateData(\n 'WriteBatch.update',\n fieldOrUpdateData\n );\n }\n\n this._mutations = this._mutations.concat(\n parsed.toMutations(ref._key, Precondition.exists(true))\n );\n return this;\n }\n\n delete(documentRef: firestore.DocumentReference<unknown>): WriteBatch {\n validateExactNumberOfArgs('WriteBatch.delete', arguments, 1);\n this.verifyNotCommitted();\n const ref = validateReference(\n 'WriteBatch.delete',\n documentRef,\n this._firestore\n );\n this._mutations = this._mutations.concat(\n new DeleteMutation(ref._key, Precondition.none())\n );\n return this;\n }\n\n commit(): Promise<void> {\n this.verifyNotCommitted();\n this._committed = true;\n if (this._mutations.length > 0) {\n return this._firestore.ensureClientConfigured().write(this._mutations);\n }\n\n return Promise.resolve();\n }\n\n private verifyNotCommitted(): void {\n if (this._committed) {\n throw new FirestoreError(\n Code.FAILED_PRECONDITION,\n 'A write batch can no longer be used after commit() ' +\n 'has been called.'\n );\n }\n }\n}\n\n/**\n * A reference to a particular document in a collection in the database.\n */\nexport class DocumentReference<T = firestore.DocumentData>\n implements firestore.DocumentReference<T> {\n private _firestoreClient: FirestoreClient;\n\n constructor(\n public _key: DocumentKey,\n readonly firestore: Firestore,\n readonly _converter?: firestore.FirestoreDataConverter<T>\n ) {\n this._firestoreClient = this.firestore.ensureClientConfigured();\n }\n\n static forPath<U>(\n path: ResourcePath,\n firestore: Firestore,\n converter?: firestore.FirestoreDataConverter<U>\n ): DocumentReference<U> {\n if (path.length % 2 !== 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid document reference. 
Document ' +\n 'references must have an even number of segments, but ' +\n `${path.canonicalString()} has ${path.length}`\n );\n }\n return new DocumentReference(new DocumentKey(path), firestore, converter);\n }\n\n get id(): string {\n return this._key.path.lastSegment();\n }\n\n get parent(): firestore.CollectionReference<T> {\n return new CollectionReference(\n this._key.path.popLast(),\n this.firestore,\n this._converter\n );\n }\n\n get path(): string {\n return this._key.path.canonicalString();\n }\n\n collection(\n pathString: string\n ): firestore.CollectionReference<firestore.DocumentData> {\n validateExactNumberOfArgs('DocumentReference.collection', arguments, 1);\n validateArgType(\n 'DocumentReference.collection',\n 'non-empty string',\n 1,\n pathString\n );\n if (!pathString) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Must provide a non-empty collection name to collection()'\n );\n }\n const path = ResourcePath.fromString(pathString);\n return new CollectionReference(this._key.path.child(path), this.firestore);\n }\n\n isEqual(other: firestore.DocumentReference<T>): boolean {\n if (!(other instanceof DocumentReference)) {\n throw invalidClassError('isEqual', 'DocumentReference', 1, other);\n }\n return (\n this.firestore === other.firestore &&\n this._key.isEqual(other._key) &&\n this._converter === other._converter\n );\n }\n\n set(\n value: firestore.DocumentData,\n options?: firestore.SetOptions\n ): Promise<void>;\n set(value: T, options?: firestore.SetOptions): Promise<void> {\n validateBetweenNumberOfArgs('DocumentReference.set', arguments, 1, 2);\n options = validateSetOptions('DocumentReference.set', options);\n const [convertedValue, functionName] = applyFirestoreDataConverter(\n this._converter,\n value,\n 'DocumentReference.set'\n );\n const parsed =\n options.merge || options.mergeFields\n ? 
this.firestore._dataReader.parseMergeData(\n functionName,\n convertedValue,\n options.mergeFields\n )\n : this.firestore._dataReader.parseSetData(functionName, convertedValue);\n return this._firestoreClient.write(\n parsed.toMutations(this._key, Precondition.none())\n );\n }\n\n update(value: firestore.UpdateData): Promise<void>;\n update(\n field: string | ExternalFieldPath,\n value: unknown,\n ...moreFieldsAndValues: unknown[]\n ): Promise<void>;\n update(\n fieldOrUpdateData: string | ExternalFieldPath | firestore.UpdateData,\n value?: unknown,\n ...moreFieldsAndValues: unknown[]\n ): Promise<void> {\n let parsed;\n\n if (\n typeof fieldOrUpdateData === 'string' ||\n fieldOrUpdateData instanceof ExternalFieldPath\n ) {\n validateAtLeastNumberOfArgs('DocumentReference.update', arguments, 2);\n parsed = this.firestore._dataReader.parseUpdateVarargs(\n 'DocumentReference.update',\n fieldOrUpdateData,\n value,\n moreFieldsAndValues\n );\n } else {\n validateExactNumberOfArgs('DocumentReference.update', arguments, 1);\n parsed = this.firestore._dataReader.parseUpdateData(\n 'DocumentReference.update',\n fieldOrUpdateData\n );\n }\n\n return this._firestoreClient.write(\n parsed.toMutations(this._key, Precondition.exists(true))\n );\n }\n\n delete(): Promise<void> {\n validateExactNumberOfArgs('DocumentReference.delete', arguments, 0);\n return this._firestoreClient.write([\n new DeleteMutation(this._key, Precondition.none())\n ]);\n }\n\n onSnapshot(\n observer: PartialObserver<firestore.DocumentSnapshot<T>>\n ): Unsubscribe;\n onSnapshot(\n options: firestore.SnapshotListenOptions,\n observer: PartialObserver<firestore.DocumentSnapshot<T>>\n ): Unsubscribe;\n onSnapshot(\n onNext: NextFn<firestore.DocumentSnapshot<T>>,\n onError?: ErrorFn,\n onCompletion?: CompleteFn\n ): Unsubscribe;\n onSnapshot(\n options: firestore.SnapshotListenOptions,\n onNext: NextFn<firestore.DocumentSnapshot<T>>,\n onError?: ErrorFn,\n onCompletion?: CompleteFn\n ): Unsubscribe;\n\n onSnapshot(...args: unknown[]): Unsubscribe {\n validateBetweenNumberOfArgs(\n 'DocumentReference.onSnapshot',\n arguments,\n 1,\n 4\n );\n let options: firestore.SnapshotListenOptions = {\n includeMetadataChanges: false\n };\n let observer: PartialObserver<firestore.DocumentSnapshot<T>>;\n let currArg = 0;\n if (\n typeof args[currArg] === 'object' &&\n !isPartialObserver(args[currArg])\n ) {\n options = args[currArg] as firestore.SnapshotListenOptions;\n validateOptionNames('DocumentReference.onSnapshot', options, [\n 'includeMetadataChanges'\n ]);\n validateNamedOptionalType(\n 'DocumentReference.onSnapshot',\n 'boolean',\n 'includeMetadataChanges',\n options.includeMetadataChanges\n );\n currArg++;\n }\n\n const internalOptions = {\n includeMetadataChanges: options.includeMetadataChanges\n };\n\n if (isPartialObserver(args[currArg])) {\n observer = args[currArg] as PartialObserver<\n firestore.DocumentSnapshot<T>\n >;\n } else {\n validateArgType(\n 'DocumentReference.onSnapshot',\n 'function',\n currArg,\n args[currArg]\n );\n validateOptionalArgType(\n 'DocumentReference.onSnapshot',\n 'function',\n currArg + 1,\n args[currArg + 1]\n );\n validateOptionalArgType(\n 'DocumentReference.onSnapshot',\n 'function',\n currArg + 2,\n args[currArg + 2]\n );\n observer = {\n next: args[currArg] as NextFn<firestore.DocumentSnapshot<T>>,\n error: args[currArg + 1] as ErrorFn,\n complete: args[currArg + 2] as CompleteFn\n };\n }\n return this.onSnapshotInternal(internalOptions, observer);\n }\n\n private onSnapshotInternal(\n options: 
ListenOptions,\n observer: PartialObserver<firestore.DocumentSnapshot<T>>\n ): Unsubscribe {\n let errHandler = (err: Error): void => {\n console.error('Uncaught Error in onSnapshot:', err);\n };\n if (observer.error) {\n errHandler = observer.error.bind(observer);\n }\n\n const asyncObserver = new AsyncObserver<ViewSnapshot>({\n next: snapshot => {\n if (observer.next) {\n debugAssert(\n snapshot.docs.size <= 1,\n 'Too many documents returned on a document query'\n );\n const doc = snapshot.docs.get(this._key);\n\n observer.next(\n new DocumentSnapshot(\n this.firestore,\n this._key,\n doc,\n snapshot.fromCache,\n snapshot.hasPendingWrites,\n this._converter\n )\n );\n }\n },\n error: errHandler\n });\n const internalListener = this._firestoreClient.listen(\n InternalQuery.atPath(this._key.path),\n asyncObserver,\n options\n );\n\n return () => {\n asyncObserver.mute();\n this._firestoreClient.unlisten(internalListener);\n };\n }\n\n get(options?: firestore.GetOptions): Promise<firestore.DocumentSnapshot<T>> {\n validateBetweenNumberOfArgs('DocumentReference.get', arguments, 0, 1);\n validateGetOptions('DocumentReference.get', options);\n return new Promise(\n (resolve: Resolver<firestore.DocumentSnapshot<T>>, reject: Rejecter) => {\n if (options && options.source === 'cache') {\n this.firestore\n .ensureClientConfigured()\n .getDocumentFromLocalCache(this._key)\n .then(doc => {\n resolve(\n new DocumentSnapshot(\n this.firestore,\n this._key,\n doc,\n /*fromCache=*/ true,\n doc instanceof Document ? doc.hasLocalMutations : false,\n this._converter\n )\n );\n }, reject);\n } else {\n this.getViaSnapshotListener(resolve, reject, options);\n }\n }\n );\n }\n\n private getViaSnapshotListener(\n resolve: Resolver<firestore.DocumentSnapshot<T>>,\n reject: Rejecter,\n options?: firestore.GetOptions\n ): void {\n const unlisten = this.onSnapshotInternal(\n {\n includeMetadataChanges: true,\n waitForSyncWhenOnline: true\n },\n {\n next: (snap: firestore.DocumentSnapshot<T>) => {\n // Remove query first before passing event to user to avoid\n // user actions affecting the now stale query.\n unlisten();\n\n if (!snap.exists && snap.metadata.fromCache) {\n // TODO(dimond): If we're online and the document doesn't\n // exist then we resolve with a doc.exists set to false. If\n // we're offline however, we reject the Promise in this\n // case. Two options: 1) Cache the negative response from\n // the server so we can deliver that even when you're\n // offline 2) Actually reject the Promise in the online case\n // if the document doesn't exist.\n reject(\n new FirestoreError(\n Code.UNAVAILABLE,\n 'Failed to get document because the client is ' + 'offline.'\n )\n );\n } else if (\n snap.exists &&\n snap.metadata.fromCache &&\n options &&\n options.source === 'server'\n ) {\n reject(\n new FirestoreError(\n Code.UNAVAILABLE,\n 'Failed to get document from server. (However, this ' +\n 'document does exist in the local cache. 
Run again ' +\n 'without setting source to \"server\" to ' +\n 'retrieve the cached document.)'\n )\n );\n } else {\n resolve(snap);\n }\n },\n error: reject\n }\n );\n }\n\n withConverter<U>(\n converter: firestore.FirestoreDataConverter<U>\n ): firestore.DocumentReference<U> {\n return new DocumentReference<U>(this._key, this.firestore, converter);\n }\n}\n\nclass SnapshotMetadata implements firestore.SnapshotMetadata {\n constructor(\n readonly hasPendingWrites: boolean,\n readonly fromCache: boolean\n ) {}\n\n isEqual(other: firestore.SnapshotMetadata): boolean {\n return (\n this.hasPendingWrites === other.hasPendingWrites &&\n this.fromCache === other.fromCache\n );\n }\n}\n\n/**\n * Options interface that can be provided to configure the deserialization of\n * DocumentSnapshots.\n */\nexport interface SnapshotOptions extends firestore.SnapshotOptions {}\n\nexport class DocumentSnapshot<T = firestore.DocumentData>\n implements firestore.DocumentSnapshot<T> {\n constructor(\n private _firestore: Firestore,\n private _key: DocumentKey,\n public _document: Document | null,\n private _fromCache: boolean,\n private _hasPendingWrites: boolean,\n private readonly _converter?: firestore.FirestoreDataConverter<T>\n ) {}\n\n data(options?: firestore.SnapshotOptions): T | undefined {\n validateBetweenNumberOfArgs('DocumentSnapshot.data', arguments, 0, 1);\n options = validateSnapshotOptions('DocumentSnapshot.data', options);\n if (!this._document) {\n return undefined;\n } else {\n // We only want to use the converter and create a new DocumentSnapshot\n // if a converter has been provided.\n if (this._converter) {\n const snapshot = new QueryDocumentSnapshot(\n this._firestore,\n this._key,\n this._document,\n this._fromCache,\n this._hasPendingWrites\n );\n return this._converter.fromFirestore(snapshot, options);\n } else {\n const userDataWriter = new UserDataWriter(\n this._firestore,\n this._firestore._areTimestampsInSnapshotsEnabled(),\n options.serverTimestamps,\n /* converter= */ undefined\n );\n return userDataWriter.convertValue(this._document.toProto()) as T;\n }\n }\n }\n\n get(\n fieldPath: string | ExternalFieldPath,\n options?: firestore.SnapshotOptions\n ): unknown {\n validateBetweenNumberOfArgs('DocumentSnapshot.get', arguments, 1, 2);\n options = validateSnapshotOptions('DocumentSnapshot.get', options);\n if (this._document) {\n const value = this._document\n .data()\n .field(fieldPathFromArgument('DocumentSnapshot.get', fieldPath));\n if (value !== null) {\n const userDataWriter = new UserDataWriter(\n this._firestore,\n this._firestore._areTimestampsInSnapshotsEnabled(),\n options.serverTimestamps,\n this._converter\n );\n return userDataWriter.convertValue(value);\n }\n }\n return undefined;\n }\n\n get id(): string {\n return this._key.path.lastSegment();\n }\n\n get ref(): firestore.DocumentReference<T> {\n return new DocumentReference<T>(\n this._key,\n this._firestore,\n this._converter\n );\n }\n\n get exists(): boolean {\n return this._document !== null;\n }\n\n get metadata(): firestore.SnapshotMetadata {\n return new SnapshotMetadata(this._hasPendingWrites, this._fromCache);\n }\n\n isEqual(other: firestore.DocumentSnapshot<T>): boolean {\n if (!(other instanceof DocumentSnapshot)) {\n throw invalidClassError('isEqual', 'DocumentSnapshot', 1, other);\n }\n return (\n this._firestore === other._firestore &&\n this._fromCache === other._fromCache &&\n this._key.isEqual(other._key) &&\n (this._document === null\n ? 
other._document === null\n : this._document.isEqual(other._document)) &&\n this._converter === other._converter\n );\n }\n}\n\nexport class QueryDocumentSnapshot<T = firestore.DocumentData>\n extends DocumentSnapshot<T>\n implements firestore.QueryDocumentSnapshot<T> {\n data(options?: SnapshotOptions): T {\n const data = super.data(options);\n debugAssert(\n data !== undefined,\n 'Document in a QueryDocumentSnapshot should exist'\n );\n return data;\n }\n}\n\nexport class Query<T = firestore.DocumentData> implements firestore.Query<T> {\n constructor(\n public _query: InternalQuery,\n readonly firestore: Firestore,\n protected readonly _converter?: firestore.FirestoreDataConverter<T>\n ) {}\n\n where(\n field: string | ExternalFieldPath,\n opStr: firestore.WhereFilterOp,\n value: unknown\n ): firestore.Query<T> {\n validateExactNumberOfArgs('Query.where', arguments, 3);\n validateDefined('Query.where', 3, value);\n\n // Enumerated from the WhereFilterOp type in index.d.ts.\n const whereFilterOpEnums = [\n Operator.LESS_THAN,\n Operator.LESS_THAN_OR_EQUAL,\n Operator.EQUAL,\n Operator.GREATER_THAN_OR_EQUAL,\n Operator.GREATER_THAN,\n Operator.ARRAY_CONTAINS,\n Operator.IN,\n Operator.ARRAY_CONTAINS_ANY\n ];\n const op = validateStringEnum('Query.where', whereFilterOpEnums, 2, opStr);\n\n let fieldValue: api.Value;\n const fieldPath = fieldPathFromArgument('Query.where', field);\n if (fieldPath.isKeyField()) {\n if (\n op === Operator.ARRAY_CONTAINS ||\n op === Operator.ARRAY_CONTAINS_ANY\n ) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid Query. You can't perform '${op}' ` +\n 'queries on FieldPath.documentId().'\n );\n } else if (op === Operator.IN) {\n this.validateDisjunctiveFilterElements(value, op);\n const referenceList: api.Value[] = [];\n for (const arrayValue of value as api.Value[]) {\n referenceList.push(this.parseDocumentIdValue(arrayValue));\n }\n fieldValue = { arrayValue: { values: referenceList } };\n } else {\n fieldValue = this.parseDocumentIdValue(value);\n }\n } else {\n if (op === Operator.IN || op === Operator.ARRAY_CONTAINS_ANY) {\n this.validateDisjunctiveFilterElements(value, op);\n }\n fieldValue = this.firestore._dataReader.parseQueryValue(\n 'Query.where',\n value,\n // We only allow nested arrays for IN queries.\n /** allowArrays = */ op === Operator.IN\n );\n }\n const filter = FieldFilter.create(fieldPath, op, fieldValue);\n this.validateNewFilter(filter);\n return new Query(\n this._query.addFilter(filter),\n this.firestore,\n this._converter\n );\n }\n\n orderBy(\n field: string | ExternalFieldPath,\n directionStr?: firestore.OrderByDirection\n ): firestore.Query<T> {\n validateBetweenNumberOfArgs('Query.orderBy', arguments, 1, 2);\n validateOptionalArgType(\n 'Query.orderBy',\n 'non-empty string',\n 2,\n directionStr\n );\n let direction: Direction;\n if (directionStr === undefined || directionStr === 'asc') {\n direction = Direction.ASCENDING;\n } else if (directionStr === 'desc') {\n direction = Direction.DESCENDING;\n } else {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Function Query.orderBy() has unknown direction '${directionStr}', ` +\n `expected 'asc' or 'desc'.`\n );\n }\n if (this._query.startAt !== null) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. You must not call Query.startAt() or ' +\n 'Query.startAfter() before calling Query.orderBy().'\n );\n }\n if (this._query.endAt !== null) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. 
You must not call Query.endAt() or ' +\n 'Query.endBefore() before calling Query.orderBy().'\n );\n }\n const fieldPath = fieldPathFromArgument('Query.orderBy', field);\n const orderBy = new OrderBy(fieldPath, direction);\n this.validateNewOrderBy(orderBy);\n return new Query(\n this._query.addOrderBy(orderBy),\n this.firestore,\n this._converter\n );\n }\n\n limit(n: number): firestore.Query<T> {\n validateExactNumberOfArgs('Query.limit', arguments, 1);\n validateArgType('Query.limit', 'number', 1, n);\n validatePositiveNumber('Query.limit', 1, n);\n return new Query(\n this._query.withLimitToFirst(n),\n this.firestore,\n this._converter\n );\n }\n\n limitToLast(n: number): firestore.Query<T> {\n validateExactNumberOfArgs('Query.limitToLast', arguments, 1);\n validateArgType('Query.limitToLast', 'number', 1, n);\n validatePositiveNumber('Query.limitToLast', 1, n);\n return new Query(\n this._query.withLimitToLast(n),\n this.firestore,\n this._converter\n );\n }\n\n startAt(\n docOrField: unknown | firestore.DocumentSnapshot<unknown>,\n ...fields: unknown[]\n ): firestore.Query<T> {\n validateAtLeastNumberOfArgs('Query.startAt', arguments, 1);\n const bound = this.boundFromDocOrFields(\n 'Query.startAt',\n docOrField,\n fields,\n /*before=*/ true\n );\n return new Query(\n this._query.withStartAt(bound),\n this.firestore,\n this._converter\n );\n }\n\n startAfter(\n docOrField: unknown | firestore.DocumentSnapshot<unknown>,\n ...fields: unknown[]\n ): firestore.Query<T> {\n validateAtLeastNumberOfArgs('Query.startAfter', arguments, 1);\n const bound = this.boundFromDocOrFields(\n 'Query.startAfter',\n docOrField,\n fields,\n /*before=*/ false\n );\n return new Query(\n this._query.withStartAt(bound),\n this.firestore,\n this._converter\n );\n }\n\n endBefore(\n docOrField: unknown | firestore.DocumentSnapshot<unknown>,\n ...fields: unknown[]\n ): firestore.Query<T> {\n validateAtLeastNumberOfArgs('Query.endBefore', arguments, 1);\n const bound = this.boundFromDocOrFields(\n 'Query.endBefore',\n docOrField,\n fields,\n /*before=*/ true\n );\n return new Query(\n this._query.withEndAt(bound),\n this.firestore,\n this._converter\n );\n }\n\n endAt(\n docOrField: unknown | firestore.DocumentSnapshot<unknown>,\n ...fields: unknown[]\n ): firestore.Query<T> {\n validateAtLeastNumberOfArgs('Query.endAt', arguments, 1);\n const bound = this.boundFromDocOrFields(\n 'Query.endAt',\n docOrField,\n fields,\n /*before=*/ false\n );\n return new Query(\n this._query.withEndAt(bound),\n this.firestore,\n this._converter\n );\n }\n\n isEqual(other: firestore.Query<T>): boolean {\n if (!(other instanceof Query)) {\n throw invalidClassError('isEqual', 'Query', 1, other);\n }\n return (\n this.firestore === other.firestore && this._query.isEqual(other._query)\n );\n }\n\n withConverter<U>(\n converter: firestore.FirestoreDataConverter<U>\n ): firestore.Query<U> {\n return new Query<U>(this._query, this.firestore, converter);\n }\n\n /** Helper function to create a bound from a document or fields */\n private boundFromDocOrFields(\n methodName: string,\n docOrField: unknown | firestore.DocumentSnapshot<T>,\n fields: unknown[],\n before: boolean\n ): Bound {\n validateDefined(methodName, 1, docOrField);\n if (docOrField instanceof DocumentSnapshot) {\n if (fields.length > 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Too many arguments provided to ${methodName}().`\n );\n }\n const snap = docOrField;\n if (!snap.exists) {\n throw new FirestoreError(\n Code.NOT_FOUND,\n `Can't use a 
DocumentSnapshot that doesn't exist for ` +\n `${methodName}().`\n );\n }\n return this.boundFromDocument(snap._document!, before);\n } else {\n const allFields = [docOrField].concat(fields);\n return this.boundFromFields(methodName, allFields, before);\n }\n }\n\n /**\n * Create a Bound from a query and a document.\n *\n * Note that the Bound will always include the key of the document\n * and so only the provided document will compare equal to the returned\n * position.\n *\n * Will throw if the document does not contain all fields of the order by\n * of the query or if any of the fields in the order by are an uncommitted\n * server timestamp.\n */\n private boundFromDocument(doc: Document, before: boolean): Bound {\n const components: api.Value[] = [];\n\n // Because people expect to continue/end a query at the exact document\n // provided, we need to use the implicit sort order rather than the explicit\n // sort order, because it's guaranteed to contain the document key. That way\n // the position becomes unambiguous and the query continues/ends exactly at\n // the provided document. Without the key (by using the explicit sort\n // orders), multiple documents could match the position, yielding duplicate\n // results.\n for (const orderBy of this._query.orderBy) {\n if (orderBy.field.isKeyField()) {\n components.push(refValue(this.firestore._databaseId, doc.key));\n } else {\n const value = doc.field(orderBy.field);\n if (isServerTimestamp(value)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. You are trying to start or end a query using a ' +\n 'document for which the field \"' +\n orderBy.field +\n '\" is an uncommitted server timestamp. (Since the value of ' +\n 'this field is unknown, you cannot start/end a query with it.)'\n );\n } else if (value !== null) {\n components.push(value);\n } else {\n const field = orderBy.field.canonicalString();\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. You are trying to start or end a query using a ` +\n `document for which the field '${field}' (used as the ` +\n `orderBy) does not exist.`\n );\n }\n }\n }\n return new Bound(components, before);\n }\n\n /**\n * Converts a list of field values to a Bound for the given query.\n */\n private boundFromFields(\n methodName: string,\n values: unknown[],\n before: boolean\n ): Bound {\n // Use explicit order by's because it has to match the query the user made\n const orderBy = this._query.explicitOrderBy;\n if (values.length > orderBy.length) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Too many arguments provided to ${methodName}(). ` +\n `The number of arguments must be less than or equal to the ` +\n `number of Query.orderBy() clauses`\n );\n }\n\n const components: api.Value[] = [];\n for (let i = 0; i < values.length; i++) {\n const rawValue = values[i];\n const orderByComponent = orderBy[i];\n if (orderByComponent.field.isKeyField()) {\n if (typeof rawValue !== 'string') {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. Expected a string for document ID in ` +\n `${methodName}(), but got a ${typeof rawValue}`\n );\n }\n if (\n !this._query.isCollectionGroupQuery() &&\n rawValue.indexOf('/') !== -1\n ) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. 
When querying a collection and ordering by FieldPath.documentId(), ` +\n `the value passed to ${methodName}() must be a plain document ID, but ` +\n `'${rawValue}' contains a slash.`\n );\n }\n const path = this._query.path.child(ResourcePath.fromString(rawValue));\n if (!DocumentKey.isDocumentKey(path)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. When querying a collection group and ordering by ` +\n `FieldPath.documentId(), the value passed to ${methodName}() must result in a ` +\n `valid document path, but '${path}' is not because it contains an odd number ` +\n `of segments.`\n );\n }\n const key = new DocumentKey(path);\n components.push(refValue(this.firestore._databaseId, key));\n } else {\n const wrapped = this.firestore._dataReader.parseQueryValue(\n methodName,\n rawValue\n );\n components.push(wrapped);\n }\n }\n\n return new Bound(components, before);\n }\n\n onSnapshot(\n observer: PartialObserver<firestore.QuerySnapshot<T>>\n ): Unsubscribe;\n onSnapshot(\n options: firestore.SnapshotListenOptions,\n observer: PartialObserver<firestore.QuerySnapshot<T>>\n ): Unsubscribe;\n onSnapshot(\n onNext: NextFn<firestore.QuerySnapshot<T>>,\n onError?: ErrorFn,\n onCompletion?: CompleteFn\n ): Unsubscribe;\n onSnapshot(\n options: firestore.SnapshotListenOptions,\n onNext: NextFn<firestore.QuerySnapshot<T>>,\n onError?: ErrorFn,\n onCompletion?: CompleteFn\n ): Unsubscribe;\n\n onSnapshot(...args: unknown[]): Unsubscribe {\n validateBetweenNumberOfArgs('Query.onSnapshot', arguments, 1, 4);\n let options: firestore.SnapshotListenOptions = {};\n let observer: PartialObserver<firestore.QuerySnapshot<T>>;\n let currArg = 0;\n if (\n typeof args[currArg] === 'object' &&\n !isPartialObserver(args[currArg])\n ) {\n options = args[currArg] as firestore.SnapshotListenOptions;\n validateOptionNames('Query.onSnapshot', options, [\n 'includeMetadataChanges'\n ]);\n validateNamedOptionalType(\n 'Query.onSnapshot',\n 'boolean',\n 'includeMetadataChanges',\n options.includeMetadataChanges\n );\n currArg++;\n }\n\n if (isPartialObserver(args[currArg])) {\n observer = args[currArg] as PartialObserver<firestore.QuerySnapshot<T>>;\n } else {\n validateArgType('Query.onSnapshot', 'function', currArg, args[currArg]);\n validateOptionalArgType(\n 'Query.onSnapshot',\n 'function',\n currArg + 1,\n args[currArg + 1]\n );\n validateOptionalArgType(\n 'Query.onSnapshot',\n 'function',\n currArg + 2,\n args[currArg + 2]\n );\n observer = {\n next: args[currArg] as NextFn<firestore.QuerySnapshot<T>>,\n error: args[currArg + 1] as ErrorFn,\n complete: args[currArg + 2] as CompleteFn\n };\n }\n this.validateHasExplicitOrderByForLimitToLast(this._query);\n return this.onSnapshotInternal(options, observer);\n }\n\n private onSnapshotInternal(\n options: ListenOptions,\n observer: PartialObserver<firestore.QuerySnapshot<T>>\n ): Unsubscribe {\n let errHandler = (err: Error): void => {\n console.error('Uncaught Error in onSnapshot:', err);\n };\n if (observer.error) {\n errHandler = observer.error.bind(observer);\n }\n\n const asyncObserver = new AsyncObserver<ViewSnapshot>({\n next: (result: ViewSnapshot): void => {\n if (observer.next) {\n observer.next(\n new QuerySnapshot(\n this.firestore,\n this._query,\n result,\n this._converter\n )\n );\n }\n },\n error: errHandler\n });\n\n const firestoreClient = this.firestore.ensureClientConfigured();\n const internalListener = firestoreClient.listen(\n this._query,\n asyncObserver,\n options\n );\n return (): void => {\n asyncObserver.mute();\n 
firestoreClient.unlisten(internalListener);\n };\n }\n\n private validateHasExplicitOrderByForLimitToLast(query: InternalQuery): void {\n if (query.hasLimitToLast() && query.explicitOrderBy.length === 0) {\n throw new FirestoreError(\n Code.UNIMPLEMENTED,\n 'limitToLast() queries require specifying at least one orderBy() clause'\n );\n }\n }\n\n get(options?: firestore.GetOptions): Promise<firestore.QuerySnapshot<T>> {\n validateBetweenNumberOfArgs('Query.get', arguments, 0, 1);\n validateGetOptions('Query.get', options);\n this.validateHasExplicitOrderByForLimitToLast(this._query);\n return new Promise(\n (resolve: Resolver<firestore.QuerySnapshot<T>>, reject: Rejecter) => {\n if (options && options.source === 'cache') {\n this.firestore\n .ensureClientConfigured()\n .getDocumentsFromLocalCache(this._query)\n .then((viewSnap: ViewSnapshot) => {\n resolve(\n new QuerySnapshot(\n this.firestore,\n this._query,\n viewSnap,\n this._converter\n )\n );\n }, reject);\n } else {\n this.getViaSnapshotListener(resolve, reject, options);\n }\n }\n );\n }\n\n private getViaSnapshotListener(\n resolve: Resolver<firestore.QuerySnapshot<T>>,\n reject: Rejecter,\n options?: firestore.GetOptions\n ): void {\n const unlisten = this.onSnapshotInternal(\n {\n includeMetadataChanges: true,\n waitForSyncWhenOnline: true\n },\n {\n next: (result: firestore.QuerySnapshot<T>) => {\n // Remove query first before passing event to user to avoid\n // user actions affecting the now stale query.\n unlisten();\n\n if (\n result.metadata.fromCache &&\n options &&\n options.source === 'server'\n ) {\n reject(\n new FirestoreError(\n Code.UNAVAILABLE,\n 'Failed to get documents from server. (However, these ' +\n 'documents may exist in the local cache. Run again ' +\n 'without setting source to \"server\" to ' +\n 'retrieve the cached documents.)'\n )\n );\n } else {\n resolve(result);\n }\n },\n error: reject\n }\n );\n }\n\n /**\n * Parses the given documentIdValue into a ReferenceValue, throwing\n * appropriate errors if the value is anything other than a DocumentReference\n * or String, or if the string is malformed.\n */\n private parseDocumentIdValue(documentIdValue: unknown): api.Value {\n if (typeof documentIdValue === 'string') {\n if (documentIdValue === '') {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. When querying with FieldPath.documentId(), you ' +\n 'must provide a valid document ID, but it was an empty string.'\n );\n }\n if (\n !this._query.isCollectionGroupQuery() &&\n documentIdValue.indexOf('/') !== -1\n ) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. When querying a collection by ` +\n `FieldPath.documentId(), you must provide a plain document ID, but ` +\n `'${documentIdValue}' contains a '/' character.`\n );\n }\n const path = this._query.path.child(\n ResourcePath.fromString(documentIdValue)\n );\n if (!DocumentKey.isDocumentKey(path)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. When querying a collection group by ` +\n `FieldPath.documentId(), the value provided must result in a valid document path, ` +\n `but '${path}' is not because it has an odd number of segments (${path.length}).`\n );\n }\n return refValue(this.firestore._databaseId, new DocumentKey(path));\n } else if (documentIdValue instanceof DocumentReference) {\n const ref = documentIdValue as DocumentReference<T>;\n return refValue(this.firestore._databaseId, ref._key);\n } else {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. 
When querying with FieldPath.documentId(), you must provide a valid ` +\n `string or a DocumentReference, but it was: ` +\n `${valueDescription(documentIdValue)}.`\n );\n }\n }\n\n /**\n * Validates that the value passed into a disjunctrive filter satisfies all\n * array requirements.\n */\n private validateDisjunctiveFilterElements(\n value: unknown,\n operator: Operator\n ): void {\n if (!Array.isArray(value) || value.length === 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid Query. A non-empty array is required for ' +\n `'${operator.toString()}' filters.`\n );\n }\n if (value.length > 10) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid Query. '${operator.toString()}' filters support a ` +\n 'maximum of 10 elements in the value array.'\n );\n }\n if (value.indexOf(null) >= 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid Query. '${operator.toString()}' filters cannot contain 'null' ` +\n 'in the value array.'\n );\n }\n if (value.filter(element => Number.isNaN(element)).length > 0) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid Query. '${operator.toString()}' filters cannot contain 'NaN' ` +\n 'in the value array.'\n );\n }\n }\n\n private validateNewFilter(filter: Filter): void {\n if (filter instanceof FieldFilter) {\n const arrayOps = [Operator.ARRAY_CONTAINS, Operator.ARRAY_CONTAINS_ANY];\n const disjunctiveOps = [Operator.IN, Operator.ARRAY_CONTAINS_ANY];\n const isArrayOp = arrayOps.indexOf(filter.op) >= 0;\n const isDisjunctiveOp = disjunctiveOps.indexOf(filter.op) >= 0;\n\n if (filter.isInequality()) {\n const existingField = this._query.getInequalityFilterField();\n if (existingField !== null && !existingField.isEqual(filter.field)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. All where filters with an inequality' +\n ' (<, <=, >, or >=) must be on the same field. But you have' +\n ` inequality filters on '${existingField.toString()}'` +\n ` and '${filter.field.toString()}'`\n );\n }\n\n const firstOrderByField = this._query.getFirstOrderByField();\n if (firstOrderByField !== null) {\n this.validateOrderByAndInequalityMatch(\n filter.field,\n firstOrderByField\n );\n }\n } else if (isDisjunctiveOp || isArrayOp) {\n // You can have at most 1 disjunctive filter and 1 array filter. Check if\n // the new filter conflicts with an existing one.\n let conflictingOp: Operator | null = null;\n if (isDisjunctiveOp) {\n conflictingOp = this._query.findFilterOperator(disjunctiveOps);\n }\n if (conflictingOp === null && isArrayOp) {\n conflictingOp = this._query.findFilterOperator(arrayOps);\n }\n if (conflictingOp != null) {\n // We special case when it's a duplicate op to give a slightly clearer error message.\n if (conflictingOp === filter.op) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid query. You cannot use more than one ' +\n `'${filter.op.toString()}' filter.`\n );\n } else {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. You cannot use '${filter.op.toString()}' filters ` +\n `with '${conflictingOp.toString()}' filters.`\n );\n }\n }\n }\n }\n }\n\n private validateNewOrderBy(orderBy: OrderBy): void {\n if (this._query.getFirstOrderByField() === null) {\n // This is the first order by. 
It must match any inequality.\n const inequalityField = this._query.getInequalityFilterField();\n if (inequalityField !== null) {\n this.validateOrderByAndInequalityMatch(inequalityField, orderBy.field);\n }\n }\n }\n\n private validateOrderByAndInequalityMatch(\n inequality: FieldPath,\n orderBy: FieldPath\n ): void {\n if (!orderBy.isEqual(inequality)) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid query. You have a where filter with an inequality ` +\n `(<, <=, >, or >=) on field '${inequality.toString()}' ` +\n `and so you must also use '${inequality.toString()}' ` +\n `as your first Query.orderBy(), but your first Query.orderBy() ` +\n `is on field '${orderBy.toString()}' instead.`\n );\n }\n }\n}\n\nexport class QuerySnapshot<T = firestore.DocumentData>\n implements firestore.QuerySnapshot<T> {\n private _cachedChanges: Array<firestore.DocumentChange<T>> | null = null;\n private _cachedChangesIncludeMetadataChanges: boolean | null = null;\n\n readonly metadata: firestore.SnapshotMetadata;\n\n constructor(\n private readonly _firestore: Firestore,\n private readonly _originalQuery: InternalQuery,\n private readonly _snapshot: ViewSnapshot,\n private readonly _converter?: firestore.FirestoreDataConverter<T>\n ) {\n this.metadata = new SnapshotMetadata(\n _snapshot.hasPendingWrites,\n _snapshot.fromCache\n );\n }\n\n get docs(): Array<firestore.QueryDocumentSnapshot<T>> {\n const result: Array<firestore.QueryDocumentSnapshot<T>> = [];\n this.forEach(doc => result.push(doc));\n return result;\n }\n\n get empty(): boolean {\n return this._snapshot.docs.isEmpty();\n }\n\n get size(): number {\n return this._snapshot.docs.size;\n }\n\n forEach(\n callback: (result: firestore.QueryDocumentSnapshot<T>) => void,\n thisArg?: unknown\n ): void {\n validateBetweenNumberOfArgs('QuerySnapshot.forEach', arguments, 1, 2);\n validateArgType('QuerySnapshot.forEach', 'function', 1, callback);\n this._snapshot.docs.forEach(doc => {\n callback.call(thisArg, this.convertToDocumentImpl(doc));\n });\n }\n\n get query(): firestore.Query<T> {\n return new Query(this._originalQuery, this._firestore, this._converter);\n }\n\n docChanges(\n options?: firestore.SnapshotListenOptions\n ): Array<firestore.DocumentChange<T>> {\n if (options) {\n validateOptionNames('QuerySnapshot.docChanges', options, [\n 'includeMetadataChanges'\n ]);\n validateNamedOptionalType(\n 'QuerySnapshot.docChanges',\n 'boolean',\n 'includeMetadataChanges',\n options.includeMetadataChanges\n );\n }\n\n const includeMetadataChanges = !!(\n options && options.includeMetadataChanges\n );\n\n if (includeMetadataChanges && this._snapshot.excludesMetadataChanges) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'To include metadata changes with your document changes, you must ' +\n 'also pass { includeMetadataChanges:true } to onSnapshot().'\n );\n }\n\n if (\n !this._cachedChanges ||\n this._cachedChangesIncludeMetadataChanges !== includeMetadataChanges\n ) {\n this._cachedChanges = changesFromSnapshot<T>(\n this._firestore,\n includeMetadataChanges,\n this._snapshot,\n this._converter\n );\n this._cachedChangesIncludeMetadataChanges = includeMetadataChanges;\n }\n\n return this._cachedChanges;\n }\n\n /** Check the equality. The call can be very expensive. 
*/\n isEqual(other: firestore.QuerySnapshot<T>): boolean {\n if (!(other instanceof QuerySnapshot)) {\n throw invalidClassError('isEqual', 'QuerySnapshot', 1, other);\n }\n\n return (\n this._firestore === other._firestore &&\n this._originalQuery.isEqual(other._originalQuery) &&\n this._snapshot.isEqual(other._snapshot) &&\n this._converter === other._converter\n );\n }\n\n private convertToDocumentImpl(doc: Document): QueryDocumentSnapshot<T> {\n return new QueryDocumentSnapshot(\n this._firestore,\n doc.key,\n doc,\n this.metadata.fromCache,\n this._snapshot.mutatedKeys.has(doc.key),\n this._converter\n );\n }\n}\n\nexport class CollectionReference<T = firestore.DocumentData> extends Query<T>\n implements firestore.CollectionReference<T> {\n constructor(\n readonly _path: ResourcePath,\n firestore: Firestore,\n _converter?: firestore.FirestoreDataConverter<T>\n ) {\n super(InternalQuery.atPath(_path), firestore, _converter);\n if (_path.length % 2 !== 1) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Invalid collection reference. Collection ' +\n 'references must have an odd number of segments, but ' +\n `${_path.canonicalString()} has ${_path.length}`\n );\n }\n }\n\n get id(): string {\n return this._query.path.lastSegment();\n }\n\n get parent(): firestore.DocumentReference<firestore.DocumentData> | null {\n const parentPath = this._query.path.popLast();\n if (parentPath.isEmpty()) {\n return null;\n } else {\n return new DocumentReference<firestore.DocumentData>(\n new DocumentKey(parentPath),\n this.firestore\n );\n }\n }\n\n get path(): string {\n return this._query.path.canonicalString();\n }\n\n doc(pathString?: string): firestore.DocumentReference<T> {\n validateBetweenNumberOfArgs('CollectionReference.doc', arguments, 0, 1);\n // We allow omission of 'pathString' but explicitly prohibit passing in both\n // 'undefined' and 'null'.\n if (arguments.length === 0) {\n pathString = AutoId.newId();\n }\n validateArgType(\n 'CollectionReference.doc',\n 'non-empty string',\n 1,\n pathString\n );\n if (pathString === '') {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Document path must be a non-empty string'\n );\n }\n const path = ResourcePath.fromString(pathString!);\n return DocumentReference.forPath<T>(\n this._query.path.child(path),\n this.firestore,\n this._converter\n );\n }\n\n add(value: T): Promise<firestore.DocumentReference<T>> {\n validateExactNumberOfArgs('CollectionReference.add', arguments, 1);\n const convertedValue = this._converter\n ? 
this._converter.toFirestore(value)\n : value;\n validateArgType('CollectionReference.add', 'object', 1, convertedValue);\n const docRef = this.doc();\n return docRef.set(value).then(() => docRef);\n }\n\n withConverter<U>(\n converter: firestore.FirestoreDataConverter<U>\n ): firestore.CollectionReference<U> {\n return new CollectionReference<U>(this._path, this.firestore, converter);\n }\n}\n\nfunction validateSetOptions(\n methodName: string,\n options: firestore.SetOptions | undefined\n): firestore.SetOptions {\n if (options === undefined) {\n return {\n merge: false\n };\n }\n\n validateOptionNames(methodName, options, ['merge', 'mergeFields']);\n validateNamedOptionalType(methodName, 'boolean', 'merge', options.merge);\n validateOptionalArrayElements(\n methodName,\n 'mergeFields',\n 'a string or a FieldPath',\n options.mergeFields,\n element =>\n typeof element === 'string' || element instanceof ExternalFieldPath\n );\n\n if (options.mergeFields !== undefined && options.merge !== undefined) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n `Invalid options passed to function ${methodName}(): You cannot specify both \"merge\" ` +\n `and \"mergeFields\".`\n );\n }\n\n return options;\n}\n\nfunction validateSnapshotOptions(\n methodName: string,\n options: firestore.SnapshotOptions | undefined\n): firestore.SnapshotOptions {\n if (options === undefined) {\n return {};\n }\n\n validateOptionNames(methodName, options, ['serverTimestamps']);\n validateNamedOptionalPropertyEquals(\n methodName,\n 'options',\n 'serverTimestamps',\n options.serverTimestamps,\n ['estimate', 'previous', 'none']\n );\n return options;\n}\n\nfunction validateGetOptions(\n methodName: string,\n options: firestore.GetOptions | undefined\n): void {\n validateOptionalArgType(methodName, 'object', 1, options);\n if (options) {\n validateOptionNames(methodName, options, ['source']);\n validateNamedOptionalPropertyEquals(\n methodName,\n 'options',\n 'source',\n options.source,\n ['default', 'server', 'cache']\n );\n }\n}\n\nfunction validateReference<T>(\n methodName: string,\n documentRef: firestore.DocumentReference<T>,\n firestore: Firestore\n): DocumentReference<T> {\n if (!(documentRef instanceof DocumentReference)) {\n throw invalidClassError(methodName, 'DocumentReference', 1, documentRef);\n } else if (documentRef.firestore !== firestore) {\n throw new FirestoreError(\n Code.INVALID_ARGUMENT,\n 'Provided document reference is from a different Firestore instance.'\n );\n } else {\n return documentRef;\n }\n}\n\n/**\n * Calculates the array of firestore.DocumentChange's for a given ViewSnapshot.\n *\n * Exported for testing.\n */\nexport function changesFromSnapshot<T>(\n firestore: Firestore,\n includeMetadataChanges: boolean,\n snapshot: ViewSnapshot,\n converter?: firestore.FirestoreDataConverter<T>\n): Array<firestore.DocumentChange<T>> {\n if (snapshot.oldDocs.isEmpty()) {\n // Special case the first snapshot because index calculation is easy and\n // fast\n let lastDoc: Document;\n let index = 0;\n return snapshot.docChanges.map(change => {\n const doc = new QueryDocumentSnapshot<T>(\n firestore,\n change.doc.key,\n change.doc,\n snapshot.fromCache,\n snapshot.mutatedKeys.has(change.doc.key),\n converter\n );\n debugAssert(\n change.type === ChangeType.Added,\n 'Invalid event type for first snapshot'\n );\n debugAssert(\n !lastDoc || snapshot.query.docComparator(lastDoc, change.doc) < 0,\n 'Got added events in wrong order'\n );\n lastDoc = change.doc;\n return {\n type: 'added' as 
firestore.DocumentChangeType,\n doc,\n oldIndex: -1,\n newIndex: index++\n };\n });\n } else {\n // A DocumentSet that is updated incrementally as changes are applied to use\n // to lookup the index of a document.\n let indexTracker = snapshot.oldDocs;\n return snapshot.docChanges\n .filter(\n change => includeMetadataChanges || change.type !== ChangeType.Metadata\n )\n .map(change => {\n const doc = new QueryDocumentSnapshot<T>(\n firestore,\n change.doc.key,\n change.doc,\n snapshot.fromCache,\n snapshot.mutatedKeys.has(change.doc.key),\n converter\n );\n let oldIndex = -1;\n let newIndex = -1;\n if (change.type !== ChangeType.Added) {\n oldIndex = indexTracker.indexOf(change.doc.key);\n debugAssert(oldIndex >= 0, 'Index for document not found');\n indexTracker = indexTracker.delete(change.doc.key);\n }\n if (change.type !== ChangeType.Removed) {\n indexTracker = indexTracker.add(change.doc);\n newIndex = indexTracker.indexOf(change.doc.key);\n }\n return { type: resultChangeType(change.type), doc, oldIndex, newIndex };\n });\n }\n}\n\nfunction resultChangeType(type: ChangeType): firestore.DocumentChangeType {\n switch (type) {\n case ChangeType.Added:\n return 'added';\n case ChangeType.Modified:\n case ChangeType.Metadata:\n return 'modified';\n case ChangeType.Removed:\n return 'removed';\n default:\n return fail('Unknown change type: ' + type);\n }\n}\n\n/**\n * Converts custom model object of type T into DocumentData by applying the\n * converter if it exists.\n *\n * This function is used when converting user objects to DocumentData\n * because we want to provide the user with a more specific error message if\n * their set() or fails due to invalid data originating from a toFirestore()\n * call.\n */\nfunction applyFirestoreDataConverter<T>(\n converter: firestore.FirestoreDataConverter<T> | undefined,\n value: T,\n functionName: string\n): [firestore.DocumentData, string] {\n let convertedValue;\n if (converter) {\n convertedValue = converter.toFirestore(value);\n functionName = 'toFirestore() in ' + functionName;\n } else {\n convertedValue = value as firestore.DocumentData;\n }\n return [convertedValue, functionName];\n}\n\nfunction contains(obj: object, key: string): obj is { key: unknown } {\n return Object.prototype.hasOwnProperty.call(obj, key);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Code, FirestoreError } from './error';\n\n/**\n * Helper function to prevent instantiation through the constructor.\n *\n * This method creates a new constructor that throws when it's invoked.\n * The prototype of that constructor is then set to the prototype of the hidden\n * \"class\" to expose all the prototype methods and allow for instanceof\n * checks.\n *\n * To also make all the static methods available, all properties of the\n * original constructor are copied to the new constructor.\n */\nexport function makeConstructorPrivate<T extends Function>(\n cls: T,\n optionalMessage?: string\n): T {\n 
function PublicConstructor(): never {\n let error = 'This constructor is private.';\n if (optionalMessage) {\n error += ' ';\n error += optionalMessage;\n }\n throw new FirestoreError(Code.INVALID_ARGUMENT, error);\n }\n\n // Make sure instanceof checks work and all methods are exposed on the public\n // constructor\n PublicConstructor.prototype = cls.prototype;\n\n // Copy any static methods/members\n Object.assign(PublicConstructor, cls);\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return PublicConstructor as any;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseApp, FirebaseNamespace } from '@firebase/app-types';\nimport { FirebaseAuthInternalName } from '@firebase/auth-interop-types';\nimport { _FirebaseNamespace } from '@firebase/app-types/private';\nimport { Component, ComponentType, Provider } from '@firebase/component';\nimport {\n CACHE_SIZE_UNLIMITED,\n Firestore,\n DocumentReference,\n DocumentSnapshot,\n QueryDocumentSnapshot,\n Query,\n QuerySnapshot,\n CollectionReference,\n Transaction,\n WriteBatch\n} from '../api/database';\nimport { Blob } from '../api/blob';\nimport { FieldPath } from '../api/field_path';\nimport { GeoPoint } from '../api/geo_point';\nimport { Timestamp } from '../api/timestamp';\nimport { makeConstructorPrivate } from '../util/api';\nimport { FieldValue } from '../api/field_value';\n\n// Public instance that disallows construction at runtime. 
Note that this still\n// allows instanceof checks.\nexport const PublicFirestore = makeConstructorPrivate(\n Firestore,\n 'Use firebase.firestore() instead.'\n);\nexport const PublicTransaction = makeConstructorPrivate(\n Transaction,\n 'Use firebase.firestore().runTransaction() instead.'\n);\nexport const PublicWriteBatch = makeConstructorPrivate(\n WriteBatch,\n 'Use firebase.firestore().batch() instead.'\n);\nexport const PublicDocumentReference = makeConstructorPrivate(\n DocumentReference,\n 'Use firebase.firestore().doc() instead.'\n);\nexport const PublicDocumentSnapshot = makeConstructorPrivate(DocumentSnapshot);\nexport const PublicQueryDocumentSnapshot = makeConstructorPrivate(\n QueryDocumentSnapshot\n);\nexport const PublicQuery = makeConstructorPrivate(Query);\nexport const PublicQuerySnapshot = makeConstructorPrivate(QuerySnapshot);\nexport const PublicCollectionReference = makeConstructorPrivate(\n CollectionReference,\n 'Use firebase.firestore().collection() instead.'\n);\nexport const PublicFieldValue = makeConstructorPrivate(\n FieldValue,\n 'Use FieldValue.<field>() instead.'\n);\nexport const PublicBlob = makeConstructorPrivate(\n Blob,\n 'Use Blob.fromUint8Array() or Blob.fromBase64String() instead.'\n);\n\nconst firestoreNamespace = {\n Firestore: PublicFirestore,\n GeoPoint,\n Timestamp,\n Blob: PublicBlob,\n Transaction: PublicTransaction,\n WriteBatch: PublicWriteBatch,\n DocumentReference: PublicDocumentReference,\n DocumentSnapshot: PublicDocumentSnapshot,\n Query: PublicQuery,\n QueryDocumentSnapshot: PublicQueryDocumentSnapshot,\n QuerySnapshot: PublicQuerySnapshot,\n CollectionReference: PublicCollectionReference,\n FieldPath,\n FieldValue: PublicFieldValue,\n setLogLevel: Firestore.setLogLevel,\n CACHE_SIZE_UNLIMITED\n};\n\n/**\n * Configures Firestore as part of the Firebase SDK by calling registerService.\n *\n * @param firebase The FirebaseNamespace to register Firestore with\n * @param firestoreFactory A factory function that returns a new Firestore\n * instance.\n */\nexport function configureForFirebase(\n firebase: FirebaseNamespace,\n firestoreFactory: (\n app: FirebaseApp,\n auth: Provider<FirebaseAuthInternalName>\n ) => Firestore\n): void {\n (firebase as _FirebaseNamespace).INTERNAL.registerComponent(\n new Component(\n 'firestore',\n container => {\n const app = container.getProvider('app').getImmediate()!;\n return firestoreFactory(app, container.getProvider('auth-internal'));\n },\n ComponentType.PUBLIC\n ).setServiceProps({ ...firestoreNamespace })\n );\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ConnectivityMonitor, NetworkStatus } from './connectivity_monitor';\n\nexport class NoopConnectivityMonitor implements ConnectivityMonitor {\n addCallback(callback: (status: NetworkStatus) => void): void {\n // No-op.\n }\n\n shutdown(): void {\n // No-op.\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 
2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { logDebug } from '../util/log';\nimport {\n ConnectivityMonitor,\n ConnectivityMonitorCallback,\n NetworkStatus\n} from './../remote/connectivity_monitor';\n\n// References to `window` are guarded by BrowserConnectivityMonitor.isAvailable()\n/* eslint-disable no-restricted-globals */\n\nconst LOG_TAG = 'ConnectivityMonitor';\n\n/**\n * Browser implementation of ConnectivityMonitor.\n */\nexport class BrowserConnectivityMonitor implements ConnectivityMonitor {\n private readonly networkAvailableListener = (): void =>\n this.onNetworkAvailable();\n private readonly networkUnavailableListener = (): void =>\n this.onNetworkUnavailable();\n private callbacks: ConnectivityMonitorCallback[] = [];\n\n constructor() {\n this.configureNetworkMonitoring();\n }\n\n addCallback(callback: (status: NetworkStatus) => void): void {\n this.callbacks.push(callback);\n }\n\n shutdown(): void {\n window.removeEventListener('online', this.networkAvailableListener);\n window.removeEventListener('offline', this.networkUnavailableListener);\n }\n\n private configureNetworkMonitoring(): void {\n window.addEventListener('online', this.networkAvailableListener);\n window.addEventListener('offline', this.networkUnavailableListener);\n }\n\n private onNetworkAvailable(): void {\n logDebug(LOG_TAG, 'Network connectivity changed: AVAILABLE');\n for (const callback of this.callbacks) {\n callback(NetworkStatus.AVAILABLE);\n }\n }\n\n private onNetworkUnavailable(): void {\n logDebug(LOG_TAG, 'Network connectivity changed: UNAVAILABLE');\n for (const callback of this.callbacks) {\n callback(NetworkStatus.UNAVAILABLE);\n }\n }\n\n // TODO(chenbrian): Consider passing in window either into this component or\n // here for testing via FakeWindow.\n /** Checks that all used attributes of window are available. */\n static isAvailable(): boolean {\n return (\n typeof window !== 'undefined' &&\n window.addEventListener !== undefined &&\n window.removeEventListener !== undefined\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { debugAssert } from '../util/assert';\nimport { FirestoreError } from '../util/error';\n\nimport { Stream } from './connection';\n\n/**\n * Provides a simple helper class that implements the Stream interface to\n * bridge to other implementations that are streams but do not implement the\n * interface. The stream callbacks are invoked with the callOn... 
methods.\n */\nexport class StreamBridge<I, O> implements Stream<I, O> {\n private wrappedOnOpen: (() => void) | undefined;\n private wrappedOnClose: ((err?: FirestoreError) => void) | undefined;\n private wrappedOnMessage: ((msg: O) => void) | undefined;\n\n private sendFn: (msg: I) => void;\n private closeFn: () => void;\n\n constructor(args: { sendFn: (msg: I) => void; closeFn: () => void }) {\n this.sendFn = args.sendFn;\n this.closeFn = args.closeFn;\n }\n\n onOpen(callback: () => void): void {\n debugAssert(!this.wrappedOnOpen, 'Called onOpen on stream twice!');\n this.wrappedOnOpen = callback;\n }\n\n onClose(callback: (err?: FirestoreError) => void): void {\n debugAssert(!this.wrappedOnClose, 'Called onClose on stream twice!');\n this.wrappedOnClose = callback;\n }\n\n onMessage(callback: (msg: O) => void): void {\n debugAssert(!this.wrappedOnMessage, 'Called onMessage on stream twice!');\n this.wrappedOnMessage = callback;\n }\n\n close(): void {\n this.closeFn();\n }\n\n send(msg: I): void {\n this.sendFn(msg);\n }\n\n callOnOpen(): void {\n debugAssert(\n this.wrappedOnOpen !== undefined,\n 'Cannot call onOpen because no callback was set'\n );\n this.wrappedOnOpen();\n }\n\n callOnClose(err?: FirestoreError): void {\n debugAssert(\n this.wrappedOnClose !== undefined,\n 'Cannot call onClose because no callback was set'\n );\n this.wrappedOnClose(err);\n }\n\n callOnMessage(msg: O): void {\n debugAssert(\n this.wrappedOnMessage !== undefined,\n 'Cannot call onMessage because no callback was set'\n );\n this.wrappedOnMessage(msg);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n createWebChannelTransport,\n ErrorCode,\n EventType,\n WebChannel,\n WebChannelError,\n WebChannelOptions,\n XhrIo\n} from '@firebase/webchannel-wrapper';\n\nimport {\n isBrowserExtension,\n isElectron,\n isIE,\n isMobileCordova,\n isReactNative,\n isUWP\n} from '@firebase/util';\n\nimport { Token } from '../api/credentials';\nimport { DatabaseId, DatabaseInfo } from '../core/database_info';\nimport { SDK_VERSION } from '../core/version';\nimport { Connection, Stream } from '../remote/connection';\nimport {\n mapCodeFromRpcStatus,\n mapCodeFromHttpResponseErrorStatus\n} from '../remote/rpc_error';\nimport { StreamBridge } from '../remote/stream_bridge';\nimport { debugAssert, fail, hardAssert } from '../util/assert';\nimport { Code, FirestoreError } from '../util/error';\nimport { logDebug } from '../util/log';\nimport { Indexable } from '../util/misc';\nimport { Rejecter, Resolver } from '../util/promise';\nimport { StringMap } from '../util/types';\n\nconst LOG_TAG = 'Connection';\n\nconst RPC_STREAM_SERVICE = 'google.firestore.v1.Firestore';\nconst RPC_URL_VERSION = 'v1';\n\n/**\n * Maps RPC names to the corresponding REST endpoint name.\n * Uses Object Literal notation to avoid renaming.\n */\nconst RPC_NAME_REST_MAPPING: { [key: string]: string } = {};\nRPC_NAME_REST_MAPPING['BatchGetDocuments'] = 
'batchGet';\nRPC_NAME_REST_MAPPING['Commit'] = 'commit';\n\n// TODO(b/38203344): The SDK_VERSION is set independently from Firebase because\n// we are doing out-of-band releases. Once we release as part of Firebase, we\n// should use the Firebase version instead.\nconst X_GOOG_API_CLIENT_VALUE = 'gl-js/ fire/' + SDK_VERSION;\n\nconst XHR_TIMEOUT_SECS = 15;\n\nexport class WebChannelConnection implements Connection {\n private readonly databaseId: DatabaseId;\n private readonly baseUrl: string;\n private readonly forceLongPolling: boolean;\n\n constructor(info: DatabaseInfo) {\n this.databaseId = info.databaseId;\n const proto = info.ssl ? 'https' : 'http';\n this.baseUrl = proto + '://' + info.host;\n this.forceLongPolling = info.forceLongPolling;\n }\n\n /**\n * Modifies the headers for a request, adding any authorization token if\n * present and any additional headers for the request.\n */\n private modifyHeadersForRequest(\n headers: StringMap,\n token: Token | null\n ): void {\n if (token) {\n for (const header in token.authHeaders) {\n if (token.authHeaders.hasOwnProperty(header)) {\n headers[header] = token.authHeaders[header];\n }\n }\n }\n headers['X-Goog-Api-Client'] = X_GOOG_API_CLIENT_VALUE;\n }\n\n invokeRPC<Req, Resp>(\n rpcName: string,\n request: Req,\n token: Token | null\n ): Promise<Resp> {\n const url = this.makeUrl(rpcName);\n\n return new Promise((resolve: Resolver<Resp>, reject: Rejecter) => {\n const xhr = new XhrIo();\n xhr.listenOnce(EventType.COMPLETE, () => {\n try {\n switch (xhr.getLastErrorCode()) {\n case ErrorCode.NO_ERROR:\n const json = xhr.getResponseJson() as Resp;\n logDebug(LOG_TAG, 'XHR received:', JSON.stringify(json));\n resolve(json);\n break;\n case ErrorCode.TIMEOUT:\n logDebug(LOG_TAG, 'RPC \"' + rpcName + '\" timed out');\n reject(\n new FirestoreError(Code.DEADLINE_EXCEEDED, 'Request time out')\n );\n break;\n case ErrorCode.HTTP_ERROR:\n const status = xhr.getStatus();\n logDebug(\n LOG_TAG,\n 'RPC \"' + rpcName + '\" failed with status:',\n status,\n 'response text:',\n xhr.getResponseText()\n );\n if (status > 0) {\n const responseError = (xhr.getResponseJson() as WebChannelError)\n .error;\n if (\n !!responseError &&\n !!responseError.status &&\n !!responseError.message\n ) {\n const firestoreErrorCode = mapCodeFromHttpResponseErrorStatus(\n responseError.status\n );\n reject(\n new FirestoreError(\n firestoreErrorCode,\n responseError.message\n )\n );\n } else {\n reject(\n new FirestoreError(\n Code.UNKNOWN,\n 'Server responded with status ' + xhr.getStatus()\n )\n );\n }\n } else {\n // If we received an HTTP_ERROR but there's no status code,\n // it's most probably a connection issue\n logDebug(LOG_TAG, 'RPC \"' + rpcName + '\" failed');\n reject(\n new FirestoreError(Code.UNAVAILABLE, 'Connection failed.')\n );\n }\n break;\n default:\n fail(\n 'RPC \"' +\n rpcName +\n '\" failed with unanticipated ' +\n 'webchannel error ' +\n xhr.getLastErrorCode() +\n ': ' +\n xhr.getLastError() +\n ', giving up.'\n );\n }\n } finally {\n logDebug(LOG_TAG, 'RPC \"' + rpcName + '\" completed.');\n }\n });\n\n // The database field is already encoded in URL. Specifying it again in\n // the body is not necessary in production, and will cause duplicate field\n // errors in the Firestore Emulator. 
Let's remove it.\n const jsonObj = ({ ...request } as unknown) as Indexable;\n delete jsonObj.database;\n\n const requestString = JSON.stringify(jsonObj);\n logDebug(LOG_TAG, 'XHR sending: ', url + ' ' + requestString);\n // Content-Type: text/plain will avoid preflight requests which might\n // mess with CORS and redirects by proxies. If we add custom headers\n // we will need to change this code to potentially use the\n // $httpOverwrite parameter supported by ESF to avoid\n // triggering preflight requests.\n const headers: StringMap = { 'Content-Type': 'text/plain' };\n\n this.modifyHeadersForRequest(headers, token);\n\n xhr.send(url, 'POST', requestString, headers, XHR_TIMEOUT_SECS);\n });\n }\n\n invokeStreamingRPC<Req, Resp>(\n rpcName: string,\n request: Req,\n token: Token | null\n ): Promise<Resp[]> {\n // The REST API automatically aggregates all of the streamed results, so we\n // can just use the normal invoke() method.\n return this.invokeRPC<Req, Resp[]>(rpcName, request, token);\n }\n\n openStream<Req, Resp>(\n rpcName: string,\n token: Token | null\n ): Stream<Req, Resp> {\n const urlParts = [\n this.baseUrl,\n '/',\n RPC_STREAM_SERVICE,\n '/',\n rpcName,\n '/channel'\n ];\n const webchannelTransport = createWebChannelTransport();\n const request: WebChannelOptions = {\n // Required for backend stickiness, routing behavior is based on this\n // parameter.\n httpSessionIdParam: 'gsessionid',\n initMessageHeaders: {},\n messageUrlParams: {\n // This param is used to improve routing and project isolation by the\n // backend and must be included in every request.\n database: `projects/${this.databaseId.projectId}/databases/${this.databaseId.database}`\n },\n sendRawJson: true,\n supportsCrossDomainXhr: true,\n internalChannelParams: {\n // Override the default timeout (randomized between 10-20 seconds) since\n // a large write batch on a slow internet connection may take a long\n // time to send to the backend. Rather than have WebChannel impose a\n // tight timeout which could lead to infinite timeouts and retries, we\n // set it very large (5-10 minutes) and rely on the browser's builtin\n // timeouts to kick in if the request isn't working.\n forwardChannelRequestTimeoutMs: 10 * 60 * 1000\n },\n forceLongPolling: this.forceLongPolling\n };\n\n this.modifyHeadersForRequest(request.initMessageHeaders!, token);\n\n // Sending the custom headers we just added to request.initMessageHeaders\n // (Authorization, etc.) will trigger the browser to make a CORS preflight\n // request because the XHR will no longer meet the criteria for a \"simple\"\n // CORS request:\n // https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#Simple_requests\n //\n // Therefore to avoid the CORS preflight request (an extra network\n // roundtrip), we use the httpHeadersOverwriteParam option to specify that\n // the headers should instead be encoded into a special \"$httpHeaders\" query\n // parameter, which is recognized by the webchannel backend. This is\n // formally defined here:\n // https://github.com/google/closure-library/blob/b0e1815b13fb92a46d7c9b3c30de5d6a396a3245/closure/goog/net/rpc/httpcors.js#L32\n //\n // TODO(b/145624756): There is a backend bug where $httpHeaders isn't respected if the request\n // doesn't have an Origin header. So we have to exclude a few browser environments that are\n // known to (sometimes) not include an Origin. 
See\n // https://github.com/firebase/firebase-js-sdk/issues/1491.\n if (\n !isMobileCordova() &&\n !isReactNative() &&\n !isElectron() &&\n !isIE() &&\n !isUWP() &&\n !isBrowserExtension()\n ) {\n request.httpHeadersOverwriteParam = '$httpHeaders';\n }\n\n const url = urlParts.join('');\n logDebug(LOG_TAG, 'Creating WebChannel: ' + url + ' ' + request);\n const channel = webchannelTransport.createWebChannel(url, request);\n\n // WebChannel supports sending the first message with the handshake - saving\n // a network round trip. However, it will have to call send in the same\n // JS event loop as open. In order to enforce this, we delay actually\n // opening the WebChannel until send is called. Whether we have called\n // open is tracked with this variable.\n let opened = false;\n\n // A flag to determine whether the stream was closed (by us or through an\n // error/close event) to avoid delivering multiple close events or sending\n // on a closed stream\n let closed = false;\n\n const streamBridge = new StreamBridge<Req, Resp>({\n sendFn: (msg: Req) => {\n if (!closed) {\n if (!opened) {\n logDebug(LOG_TAG, 'Opening WebChannel transport.');\n channel.open();\n opened = true;\n }\n logDebug(LOG_TAG, 'WebChannel sending:', msg);\n channel.send(msg);\n } else {\n logDebug(LOG_TAG, 'Not sending because WebChannel is closed:', msg);\n }\n },\n closeFn: () => channel.close()\n });\n\n // Closure events are guarded and exceptions are swallowed, so catch any\n // exception and rethrow using a setTimeout so they become visible again.\n // Note that eventually this function could go away if we are confident\n // enough the code is exception free.\n const unguardedEventListen = <T>(\n type: string,\n fn: (param?: T) => void\n ): void => {\n // TODO(dimond): closure typing seems broken because WebChannel does\n // not implement goog.events.Listenable\n channel.listen(type, (param: unknown) => {\n try {\n fn(param as T);\n } catch (e) {\n setTimeout(() => {\n throw e;\n }, 0);\n }\n });\n };\n\n unguardedEventListen(WebChannel.EventType.OPEN, () => {\n if (!closed) {\n logDebug(LOG_TAG, 'WebChannel transport opened.');\n }\n });\n\n unguardedEventListen(WebChannel.EventType.CLOSE, () => {\n if (!closed) {\n closed = true;\n logDebug(LOG_TAG, 'WebChannel transport closed');\n streamBridge.callOnClose();\n }\n });\n\n unguardedEventListen<Error>(WebChannel.EventType.ERROR, err => {\n if (!closed) {\n closed = true;\n logDebug(LOG_TAG, 'WebChannel transport errored:', err);\n streamBridge.callOnClose(\n new FirestoreError(\n Code.UNAVAILABLE,\n 'The operation could not be completed'\n )\n );\n }\n });\n\n // WebChannel delivers message events as array. If batching is not enabled\n // (it's off by default) each message will be delivered alone, resulting in\n // a single element array.\n interface WebChannelResponse {\n data: Resp[];\n }\n\n unguardedEventListen<WebChannelResponse>(\n WebChannel.EventType.MESSAGE,\n msg => {\n if (!closed) {\n const msgData = msg!.data[0];\n hardAssert(!!msgData, 'Got a webchannel message without data.');\n // TODO(b/35143891): There is a bug in One Platform that caused errors\n // (and only errors) to be wrapped in an extra array. To be forward\n // compatible with the bug we need to check either condition. 
The latter\n // can be removed once the fix has been rolled out.\n // Use any because msgData.error is not typed.\n const msgDataOrError: WebChannelError | object = msgData;\n const error =\n msgDataOrError.error ||\n (msgDataOrError as WebChannelError[])[0]?.error;\n if (error) {\n logDebug(LOG_TAG, 'WebChannel received error:', error);\n // error.status will be a string like 'OK' or 'NOT_FOUND'.\n const status: string = error.status;\n let code = mapCodeFromRpcStatus(status);\n let message = error.message;\n if (code === undefined) {\n code = Code.INTERNAL;\n message =\n 'Unknown error status: ' +\n status +\n ' with message ' +\n error.message;\n }\n // Mark closed so no further events are propagated\n closed = true;\n streamBridge.callOnClose(new FirestoreError(code, message));\n channel.close();\n } else {\n logDebug(LOG_TAG, 'WebChannel received:', msgData);\n streamBridge.callOnMessage(msgData);\n }\n }\n }\n );\n\n setTimeout(() => {\n // Technically we could/should wait for the WebChannel opened event,\n // but because we want to send the first message with the WebChannel\n // handshake we pretend the channel opened here (asynchronously), and\n // then delay the actual open until the first message is sent.\n streamBridge.callOnOpen();\n }, 0);\n return streamBridge;\n }\n\n // visible for testing\n makeUrl(rpcName: string): string {\n const urlRpcName = RPC_NAME_REST_MAPPING[rpcName];\n debugAssert(\n urlRpcName !== undefined,\n 'Unknown REST mapping for: ' + rpcName\n );\n return (\n this.baseUrl +\n '/' +\n RPC_URL_VERSION +\n '/projects/' +\n this.databaseId.projectId +\n '/databases/' +\n this.databaseId.database +\n '/documents:' +\n urlRpcName\n );\n }\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport firebase from '@firebase/app';\nimport { FirebaseNamespace } from '@firebase/app-types';\n\nimport { Firestore } from './src/api/database';\nimport { MemoryComponentProvider } from './src/core/component_provider';\nimport { configureForFirebase } from './src/platform/config';\n\nimport './register-module';\nimport './src/platform_browser/browser_init';\n\nimport { name, version } from './package.json';\n\n/**\n * Registers the memory-only Firestore build with the components framework.\n */\nexport function registerFirestore(instance: FirebaseNamespace): void {\n configureForFirebase(\n instance,\n (app, auth) => new Firestore(app, auth, new MemoryComponentProvider())\n );\n instance.registerVersion(name, version);\n}\n\nregisterFirestore(firebase);\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES 
OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PlatformSupport } from '../platform/platform';\nimport { BrowserPlatform } from './browser_platform';\n\n/**\n * This code needs to run before Firestore is used. This can be achieved in\n * several ways:\n * 1) Through the JSCompiler compiling this code and then (automatically)\n * executing it before exporting the Firestore symbols.\n * 2) Through importing this module first in a Firestore main module\n */\nPlatformSupport.setPlatform(new BrowserPlatform());\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DatabaseId, DatabaseInfo } from '../core/database_info';\nimport { Platform } from '../platform/platform';\nimport { Connection } from '../remote/connection';\nimport { JsonProtoSerializer } from '../remote/serializer';\nimport { ConnectivityMonitor } from './../remote/connectivity_monitor';\n\nimport { NoopConnectivityMonitor } from '../remote/connectivity_monitor_noop';\nimport { BrowserConnectivityMonitor } from './browser_connectivity_monitor';\nimport { WebChannelConnection } from './webchannel_connection';\nimport { debugAssert } from '../util/assert';\n\n// Implements the Platform API for browsers and some browser-like environments\n// (including ReactNative).\nexport class BrowserPlatform implements Platform {\n readonly base64Available: boolean;\n\n constructor() {\n this.base64Available = typeof atob !== 'undefined';\n }\n\n get document(): Document | null {\n // `document` is not always available, e.g. in ReactNative and WebWorkers.\n // eslint-disable-next-line no-restricted-globals\n return typeof document !== 'undefined' ? document : null;\n }\n\n get window(): Window | null {\n // `window` is not always available, e.g. in ReactNative and WebWorkers.\n // eslint-disable-next-line no-restricted-globals\n return typeof window !== 'undefined' ? 
window : null;\n }\n\n loadConnection(databaseInfo: DatabaseInfo): Promise<Connection> {\n return Promise.resolve(new WebChannelConnection(databaseInfo));\n }\n\n newConnectivityMonitor(): ConnectivityMonitor {\n if (BrowserConnectivityMonitor.isAvailable()) {\n return new BrowserConnectivityMonitor();\n } else {\n return new NoopConnectivityMonitor();\n }\n }\n\n newSerializer(databaseId: DatabaseId): JsonProtoSerializer {\n return new JsonProtoSerializer(databaseId, { useProto3Json: true });\n }\n\n formatJSON(value: unknown): string {\n return JSON.stringify(value);\n }\n\n atob(encoded: string): string {\n return atob(encoded);\n }\n\n btoa(raw: string): string {\n return btoa(raw);\n }\n\n randomBytes(nBytes: number): Uint8Array {\n debugAssert(nBytes >= 0, `Expecting non-negative nBytes, got: ${nBytes}`);\n\n // Polyfills for IE and WebWorker by using `self` and `msCrypto` when `crypto` is not available.\n const crypto =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n typeof self !== 'undefined' && (self.crypto || (self as any)['msCrypto']);\n const bytes = new Uint8Array(nBytes);\n if (crypto) {\n crypto.getRandomValues(bytes);\n } else {\n // Falls back to Math.random\n for (let i = 0; i < nBytes; i++) {\n bytes[i] = Math.floor(Math.random() * 256);\n }\n }\n return bytes;\n }\n}\n"],"names":["SDK_VERSION","firebase","[object Object]","uid","this","__PRIVATE_isAuthenticated","__PRIVATE_otherUser","User","Code","OK","CANCELLED","UNKNOWN","INVALID_ARGUMENT","DEADLINE_EXCEEDED","NOT_FOUND","ALREADY_EXISTS","PERMISSION_DENIED","UNAUTHENTICATED","RESOURCE_EXHAUSTED","FAILED_PRECONDITION","ABORTED","OUT_OF_RANGE","UNIMPLEMENTED","INTERNAL","UNAVAILABLE","DATA_LOSS","code","message","super","toString","name","Error","value","user","__PRIVATE_authHeaders","Authorization","Promise","resolve","__PRIVATE_changeListener","__PRIVATE_authProvider","__PRIVATE_tokenListener","__PRIVATE_tokenCounter","currentUser","__PRIVATE_getUser","__PRIVATE_receivedInitialUser","auth","getImmediate","optional","addAuthTokenListener","m","get","then","__PRIVATE_initialTokenCounter","forceRefresh","getToken","__PRIVATE_tokenData","FirestoreError","__PRIVATE_hardAssert","accessToken","__PRIVATE_OAuthToken","removeAuthTokenListener","__PRIVATE_currentUid","getUid","__PRIVATE_gapi","__PRIVATE_sessionIndex","__PRIVATE_FIRST_PARTY","o","headers","X-Goog-AuthUser","__PRIVATE_authHeader","__PRIVATE_getAuthHeaderValueForFirstParty","__PRIVATE_FirstPartyToken","seconds","nanoseconds","Timestamp","fromMillis","Date","now","date","getTime","milliseconds","Math","floor","toMillis","other","__PRIVATE_primitiveComparator","__PRIVATE_adjustedSeconds","String","padStart","timestamp","__PRIVATE_SnapshotVersion","__PRIVATE__compareTo","isEqual","segments","offset","length","fail","__PRIVATE_len","__PRIVATE_BasePath","__PRIVATE_comparator","__PRIVATE_nameOrPath","slice","limit","forEach","__PRIVATE_segment","push","__PRIVATE_construct","size","index","__PRIVATE_i","__PRIVATE_potentialChild","fn","end","p1","p2","min","left","right","ResourcePath","__PRIVATE_toArray","join","__PRIVATE_canonicalString","path","indexOf","split","filter","__PRIVATE_identifierRegExp","FieldPath","test","map","str","replace","__PRIVATE_isValidIdentifier","__PRIVATE_current","__PRIVATE_addCurrentSegment","__PRIVATE_inBackticks","c","next","__PRIVATE_DocumentKey","__PRIVATE_fromString","__PRIVATE_popFirst","collectionId","k1","k2","__PRIVATE_objectSize","obj","count","key","Object","prototype","hasOwnProperty","call","__PRIVATE_isEmpty","
__PRIVATE_binaryString","base64","__PRIVATE_ByteString","__PRIVATE_PlatformSupport","__PRIVATE_getPlatform","atob","array","fromCharCode","btoa","buffer","Uint8Array","charCodeAt","__PRIVATE_isNullOrUndefined","__PRIVATE_isNegativeZero","__PRIVATE_isServerTimestamp","mapValue","fields","__type__","stringValue","__PRIVATE_getLocalWriteTime","__PRIVATE_localWriteTime","__PRIVATE_normalizeTimestamp","__local_write_time__","timestampValue","nanos","__PRIVATE_ISO_TIMESTAMP_REG_EXP","RegExp","__PRIVATE_typeOrder","__PRIVATE_valueEquals","__PRIVATE_leftType","booleanValue","__PRIVATE_leftTimestamp","__PRIVATE_rightTimestamp","__PRIVATE_normalizeByteString","bytesValue","referenceValue","__PRIVATE_normalizeNumber","geoPointValue","latitude","longitude","integerValue","__PRIVATE_n1","doubleValue","__PRIVATE_n2","isNaN","__PRIVATE_arrayEquals","arrayValue","values","__PRIVATE_leftMap","__PRIVATE_rightMap","__PRIVATE_arrayValueContains","__PRIVATE_haystack","__PRIVATE_needle","find","v","__PRIVATE_valueCompare","__PRIVATE_rightType","__PRIVATE_leftNumber","__PRIVATE_rightNumber","__PRIVATE_compareTimestamps","__PRIVATE_leftBytes","__PRIVATE_rightBytes","__PRIVATE_compareTo","__PRIVATE_leftPath","__PRIVATE_rightPath","__PRIVATE_leftSegments","__PRIVATE_rightSegments","__PRIVATE_comparison","__PRIVATE_leftArray","__PRIVATE_rightArray","compare","__PRIVATE_leftKeys","keys","__PRIVATE_rightKeys","sort","__PRIVATE_keyCompare","canonicalId","__PRIVATE_canonifyValue","__PRIVATE_normalizedTimestamp","toBase64","__PRIVATE_fromName","__PRIVATE_geoPoint","result","first","__PRIVATE_sortedKeys","__PRIVATE_fraction","exec","__PRIVATE_nanoStr","substr","Number","__PRIVATE_parsedDate","blob","fromBase64String","fromUint8Array","__PRIVATE_refValue","__PRIVATE_databaseId","projectId","database","isInteger","isArray","__PRIVATE_isNullValue","__PRIVATE_isNanValue","__PRIVATE_isMapValue","previousValue","__previous_value__","__PRIVATE_transformResult","__PRIVATE_ServerTimestampTransform","elements","apply","__PRIVATE_coercedFieldValuesArray","__PRIVATE_toUnion","some","element","__PRIVATE_ArrayUnionTransformOperation","__PRIVATE_toRemove","__PRIVATE_ArrayRemoveTransformOperation","serializer","__PRIVATE_operand","__PRIVATE_baseValue","__PRIVATE_computeBaseValue","__PRIVATE_sum","asNumber","__PRIVATE_toInteger","__PRIVATE_toDouble","__PRIVATE_NumericIncrementTransformOperation","fieldPath","__PRIVATE_isPrefixOf","__PRIVATE_l","r","field","transform","version","transformResults","updateTime","exists","Precondition","wt","__PRIVATE_maybeDoc","Document","__PRIVATE_precondition","__PRIVATE_Mutation","__PRIVATE_mutationResult","__PRIVATE_verifyKeyMatches","hasCommittedMutations","__PRIVATE_baseDoc","__PRIVATE_isValidFor","__PRIVATE_getPostMutationVersion","At","__PRIVATE_SetMutation","data","__PRIVATE_fieldMask","__PRIVATE_UnknownDocument","__PRIVATE_newData","__PRIVATE_patchDocument","__PRIVATE_PatchMutation","__PRIVATE_ObjectValue","empty","__PRIVATE_patchObject","__PRIVATE_builder","__PRIVATE_ObjectValueBuilder","newValue","set","delete","__PRIVATE_build","fieldTransforms","doc","__PRIVATE_requireDocument","__PRIVATE_serverTransformResults","__PRIVATE_transformObject","__PRIVATE_localTransformResults","__PRIVATE_baseObject","__PRIVATE_fieldTransform","__PRIVATE_existingValue","__PRIVATE_coercedValue","__PRIVATE_TransformMutation","__PRIVATE_applyToRemoteDocument","__PRIVATE_applyToLocalView","__PRIVATE_NoDocument","__PRIVATE_DeleteMutation","__PRIVATE_VerifyMutation","proto","__PRIVATE_lastSegment","Map","__PRIVATE_setOverl
ay","__PRIVATE_currentLevel","__PRIVATE_overlayMap","__PRIVATE_currentSegment","currentValue","entries","__PRIVATE_mergedResult","__PRIVATE_applyOverlay","__PRIVATE_EMPTY_PATH","__PRIVATE_currentPath","__PRIVATE_currentOverlays","__PRIVATE_modified","__PRIVATE_resultAtPath","__PRIVATE_pathSegment","__PRIVATE_nested","child","__PRIVATE_extractFieldMask","__PRIVATE_nestedFields","__PRIVATE_nestedPath","__PRIVATE_FieldMask","__PRIVATE_RpcCode","__PRIVATE_objectValue","options","__PRIVATE_hasLocalMutations","__PRIVATE_MaybeDocument","hasPendingWrites","collectionGroup","orderBy","filters","startAt","endAt","__PRIVATE_memoizedCanonicalId","f","__PRIVATE_isDocumentKey","__PRIVATE_explicitOrderBy","__PRIVATE_limitType","__PRIVATE_assertValidBound","Query","__PRIVATE_memoizedOrderBy","__PRIVATE_inequalityField","__PRIVATE_getInequalityFilterField","__PRIVATE_firstOrderByField","__PRIVATE_getFirstOrderByField","__PRIVATE_isKeyField","__PRIVATE_OrderBy","__PRIVATE_keyField","__PRIVATE_foundKeyOrdering","__PRIVATE_lastDirection","dir","__PRIVATE_newFilters","concat","__PRIVATE_newOrderBy","bound","__PRIVATE_toTarget","__PRIVATE_d1","__PRIVATE_d2","__PRIVATE_comparedOnKeyField","__PRIVATE_comp","__PRIVATE_matchesPathAndCollectionGroup","__PRIVATE_matchesOrderBy","__PRIVATE_matchesFilters","__PRIVATE_matchesBounds","FieldFilter","__PRIVATE_isInequality","__PRIVATE_operators","op","__PRIVATE_isDocumentQuery","__PRIVATE_memoizedTarget","Target","__PRIVATE_orderBys","__PRIVATE_Bound","position","before","__PRIVATE_docPath","__PRIVATE_hasCollectionId","__PRIVATE_isImmediateParentOf","matches","__PRIVATE_sortsBeforeDocument","__PRIVATE_KeyFieldInFilter","__PRIVATE_KeyFieldFilter","__PRIVATE_ArrayContainsFilter","__PRIVATE_InFilter","__PRIVATE_ArrayContainsAnyFilter","__PRIVATE_matchesComparison","val","p","__PRIVATE_orderByComponent","component","__PRIVATE_isKeyOrderBy","v1","v2","target","targetId","__PRIVATE_purpose","sequenceNumber","__PRIVATE_snapshotVersion","lastLimboFreeSnapshotVersion","resumeToken","__PRIVATE_EMPTY_BYTE_STRING","__PRIVATE_TargetData","__PRIVATE_isPermanentError","__PRIVATE_mapCodeFromRpcCode","__PRIVATE_logError","RpcCode","root","__PRIVATE_LLRBNode","EMPTY","__PRIVATE_SortedMap","__PRIVATE_insert","__PRIVATE_copy","__PRIVATE_BLACK","remove","node","cmp","__PRIVATE_prunedNodes","__PRIVATE_minKey","__PRIVATE_maxKey","action","__PRIVATE_inorderTraversal","k","__PRIVATE_descriptions","__PRIVATE_reverseTraversal","__PRIVATE_SortedMapIterator","__PRIVATE_startKey","__PRIVATE_isReverse","__PRIVATE_nodeStack","pop","color","RED","n","__PRIVATE_fixUp","__PRIVATE_isRed","__PRIVATE_moveRedLeft","__PRIVATE_removeMin","__PRIVATE_smallest","__PRIVATE_rotateRight","__PRIVATE_moveRedRight","__PRIVATE_rotateLeft","__PRIVATE_colorFlip","__PRIVATE_nl","__PRIVATE_nr","__PRIVATE_blackDepth","__PRIVATE_check","pow","__PRIVATE_elem","cb","range","__PRIVATE_iter","__PRIVATE_getIteratorFrom","__PRIVATE_hasNext","__PRIVATE_getNext","start","__PRIVATE_getIterator","__PRIVATE_SortedSetIterator","has","add","__PRIVATE_SortedSet","__PRIVATE_thisIt","__PRIVATE_otherIt","__PRIVATE_thisElem","__PRIVATE_otherElem","__PRIVATE_res","__PRIVATE_EMPTY_MAYBE_DOCUMENT_MAP","__PRIVATE_maybeDocumentMap","__PRIVATE_nullableMaybeDocumentMap","__PRIVATE_EMPTY_DOCUMENT_MAP","__PRIVATE_documentMap","__PRIVATE_EMPTY_DOCUMENT_VERSION_MAP","__PRIVATE_documentVersionMap","__PRIVATE_EMPTY_DOCUMENT_KEY_SET","__PRIVATE_documentKeySet","__PRIVATE_EMPTY_TARGET_ID_SET","__PRIVATE_targetIdSet","__PRIVATE_keyedMap","__PRIVATE_sortedSet","__
PRIVATE_oldSet","__PRIVATE_DocumentSet","__PRIVATE_thisDoc","__PRIVATE_otherDoc","__PRIVATE_docStrings","__PRIVATE_newSet","__PRIVATE_change","__PRIVATE_oldChange","__PRIVATE_changeMap","type","__PRIVATE_changes","query","docs","__PRIVATE_oldDocs","docChanges","__PRIVATE_mutatedKeys","fromCache","__PRIVATE_syncStateChanged","__PRIVATE_excludesMetadataChanges","documents","__PRIVATE_ViewSnapshot","__PRIVATE_emptySet","__PRIVATE_otherChanges","__PRIVATE_targetChanges","__PRIVATE_targetMismatches","__PRIVATE_documentUpdates","__PRIVATE_resolvedLimboDocuments","TargetChange","__PRIVATE_createSynthesizedTargetChangeForCurrentChange","__PRIVATE_RemoteEvent","__PRIVATE_addedDocuments","__PRIVATE_modifiedDocuments","__PRIVATE_removedDocuments","__PRIVATE_updatedTargetIds","removedTargetIds","__PRIVATE_newDoc","__PRIVATE_existenceFilter","state","targetIds","cause","__PRIVATE_snapshotChangesMap","ds","__PRIVATE__current","__PRIVATE__resumeToken","gs","__PRIVATE_pendingResponses","bs","__PRIVATE__hasPendingChanges","__PRIVATE_approximateByteSize","__PRIVATE_documentChanges","__PRIVATE_changeType","__PRIVATE_metadataProvider","__PRIVATE_documentTargetMap","__PRIVATE_docChange","__PRIVATE_addDocumentToTarget","__PRIVATE_removeDocumentFromTarget","targetChange","__PRIVATE_forEachTarget","__PRIVATE_targetState","__PRIVATE_ensureTargetState","__PRIVATE_isActiveTarget","__PRIVATE_updateResumeToken","__PRIVATE_recordTargetResponse","__PRIVATE_isPending","__PRIVATE_clearPendingChanges","removeTarget","__PRIVATE_markCurrent","__PRIVATE_resetTarget","__PRIVATE_targetStates","__PRIVATE__","__PRIVATE_watchChange","__PRIVATE_expectedCount","__PRIVATE_targetData","__PRIVATE_targetDataForActiveTarget","__PRIVATE_getCurrentDocumentCountForTarget","__PRIVATE_pendingTargetResets","__PRIVATE_pendingDocumentUpdates","__PRIVATE_targetContainsDocument","__PRIVATE_hasPendingChanges","__PRIVATE_toTargetChange","__PRIVATE_pendingDocumentTargetMapping","__PRIVATE_targets","__PRIVATE_isOnlyLimboTarget","__PRIVATE_forEachWhile","__PRIVATE_remoteEvent","document","__PRIVATE_addDocumentChange","__PRIVATE_ensureDocumentTargetMapping","__PRIVATE_updatedDocument","__PRIVATE_removeDocumentChange","__PRIVATE_getRemoteKeysForTarget","__PRIVATE_recordPendingTargetRequest","__PRIVATE_TargetState","__PRIVATE_targetMapping","__PRIVATE_targetActive","__PRIVATE_logDebug","__PRIVATE_getTargetDataForTarget","__PRIVATE_DIRECTIONS","asc","desc","__PRIVATE_OPERATORS","<","<=",">",">=","==","array-contains","in","array-contains-any","status","__PRIVATE_useProto3Json","Infinity","MAX_SAFE_INTEGER","MIN_SAFE_INTEGER","toISOString","bytes","toUint8Array","__PRIVATE_toTimestamp","__PRIVATE_fromTimestamp","__PRIVATE_fullyQualifiedPrefixPath","__PRIVATE_resource","__PRIVATE_isValidResourceName","__PRIVATE_toResourceName","__PRIVATE_fromResourceName","__PRIVATE_extractLocalPathFromResourceName","__PRIVATE_resourceName","mi","__PRIVATE_toName","__PRIVATE_toProto","fromVersion","found","missing","readTime","__PRIVATE_fromFound","__PRIVATE_fromMissing","__PRIVATE_fromWatchTargetChangeState","targetChangeType","__PRIVATE_fromBytes","__PRIVATE_causeProto","__PRIVATE_fromRpcStatus","__PRIVATE_WatchTargetChange","documentChange","__PRIVATE_entityChange","__PRIVATE_DocumentWatchChange","documentDelete","__PRIVATE_docDelete","documentRemove","__PRIVATE_docRemove","ExistenceFilter","__PRIVATE_ExistenceFilterChange","__PRIVATE_mutation","update","__PRIVATE_toMutationDocument","updateMask","__PRIVATE_toDocumentMask","__PRIVATE_toFieldTransform","verify","__PRIVATE_i
sNone","currentDocument","__PRIVATE_toPrecondition","__PRIVATE_fromPrecondition","__PRIVATE_none","__PRIVATE_fromDocumentMask","__PRIVATE_fromFieldTransform","toVersion","commitTime","__PRIVATE_MutationResult","__PRIVATE_protos","__PRIVATE_fromWriteResult","setToServerValue","appendMissingElements","removeAllFromArray","increment","instance","__PRIVATE_fromServerFormat","FieldTransform","__PRIVATE_toQueryPath","__PRIVATE_documentsTarget","__PRIVATE_atPath","__PRIVATE_fromQueryPath","structuredQuery","parent","from","allDescendants","__PRIVATE_popLast","where","__PRIVATE_toFilter","__PRIVATE_toOrder","__PRIVATE_toInt32Proto","__PRIVATE_toCursor","__PRIVATE_fromCount","__PRIVATE_filterBy","__PRIVATE_fromFilter","__PRIVATE_fromOrder","__PRIVATE_fromInt32Proto","__PRIVATE_fromCursor","__PRIVATE_toLabel","goog-listen-tags","__PRIVATE_toDocumentsTarget","__PRIVATE_toQueryTarget","__PRIVATE_toBytes","__PRIVATE_toUnaryOrFieldFilter","compositeFilter","unaryFilter","__PRIVATE_fromUnaryFilter","fieldFilter","__PRIVATE_fromFieldFilter","reduce","__PRIVATE_accum","order","__PRIVATE_toPropertyOrder","__PRIVATE_fromPropertyOrder","cursor","__PRIVATE_fieldReference","__PRIVATE_toFieldPathReference","direction","__PRIVATE_toDirection","__PRIVATE_fromFieldPathReference","__PRIVATE_fromDirection","create","__PRIVATE_fromOperatorName","__PRIVATE_toOperatorName","__PRIVATE_nanField","NaN","__PRIVATE_nullField","nullValue","__PRIVATE_canonicalFields","fieldPaths","__PRIVATE_paths","platform","__PRIVATE_logClient","Logger","__PRIVATE_getLogLevel","logLevel","setLogLevel","__PRIVATE_newLevel","msg","LogLevel","DEBUG","args","__PRIVATE_argToString","debug","ERROR","error","__PRIVATE_formatJSON","e","__PRIVATE_failure","assertion","__PRIVATE_debugCast","constructor","__PRIVATE_chars","__PRIVATE_maxMultiple","__PRIVATE_autoId","__PRIVATE_randomBytes","charAt","every","persistenceKey","host","ssl","forceLongPolling","_n","__PRIVATE_DatabaseId","__PRIVATE_mapKeyFn","id","__PRIVATE_inner","__PRIVATE_otherKey","splice","batchId","baseMutations","mutations","__PRIVATE_docKey","__PRIVATE_batchResult","__PRIVATE_mutationResults","__PRIVATE_maybeDocs","__PRIVATE_mutatedDocuments","__PRIVATE_m","__PRIVATE_mutatedDocument","batch","__PRIVATE_commitVersion","streamToken","__PRIVATE_docVersions","results","__PRIVATE_versionMap","__PRIVATE_MutationBatchResult","callback","__PRIVATE_isDone","__PRIVATE_nextCallback","__PRIVATE_catchCallback","__PRIVATE_nextFn","__PRIVATE_catchFn","__PRIVATE_callbackAttached","__PRIVATE_wrapFailure","__PRIVATE_wrapSuccess","PersistencePromise","reject","__PRIVATE_wrapUserFunction","all","__PRIVATE_resolvedCount","done","__PRIVATE_err","__PRIVATE_predicates","predicate","__PRIVATE_isTrue","collection","__PRIVATE_promises","s","__PRIVATE_waitFor","__PRIVATE_remoteDocumentCache","__PRIVATE_mutationQueue","__PRIVATE_indexManager","transaction","__PRIVATE_getAllMutationBatchesAffectingDocumentKey","__PRIVATE_batches","__PRIVATE_getDocumentInternal","__PRIVATE_inBatches","__PRIVATE_getEntry","__PRIVATE_localView","getEntries","__PRIVATE_getLocalViewOfDocuments","__PRIVATE_baseDocs","__PRIVATE_getAllMutationBatchesAffectingDocumentKeys","__PRIVATE_applyLocalMutationsToDocuments","__PRIVATE_sinceReadTime","__PRIVATE_getDocumentsMatchingDocumentQuery","__PRIVATE_isCollectionGroupQuery","__PRIVATE_getDocumentsMatchingCollectionGroupQuery","__PRIVATE_getDocumentsMatchingCollectionQuery","__PRIVATE_getDocument","__PRIVATE_getCollectionParents","__PRIVATE_parents","__PRIVATE_collectionQuery","__PRIVATE_asCollec
tionQueryAtPath","__PRIVATE_mutationBatches","__PRIVATE_getDocumentsMatchingQuery","__PRIVATE_queryResults","__PRIVATE_getAllMutationBatchesAffectingQuery","__PRIVATE_matchingMutationBatches","__PRIVATE_addMissingBaseDocuments","__PRIVATE_mergedDocuments","__PRIVATE_mutatedDoc","__PRIVATE_existingDocuments","__PRIVATE_missingBaseDocEntriesForPatching","__PRIVATE_missingBaseDocs","__PRIVATE_addedKeys","__PRIVATE_removedKeys","__PRIVATE_viewSnapshot","__PRIVATE_LocalViewChanges","__PRIVATE_sequenceNumberSyncer","__PRIVATE_sequenceNumberHandler","__PRIVATE_setPreviousValue","__PRIVATE_writeNewSequenceNumber","__PRIVATE_writeSequenceNumber","__PRIVATE_externalPreviousValue","max","__PRIVATE_nextValue","__PRIVATE_ListenSequence","promise","__PRIVATE_queue","__PRIVATE_timerId","__PRIVATE_initialDelayMs","__PRIVATE_backoffFactor","__PRIVATE_maxDelayMs","reset","__PRIVATE_currentBaseMs","cancel","__PRIVATE_desiredDelayWithJitterMs","__PRIVATE_jitterDelayMs","__PRIVATE_delaySoFarMs","__PRIVATE_lastAttemptTime","__PRIVATE_remainingDelayMs","__PRIVATE_timerPromise","__PRIVATE_enqueueAfterDelay","__PRIVATE_skipDelay","random","__PRIVATE_MemoryCollectionParentIndex","collectionPath","__PRIVATE_collectionParentIndex","parentPath","__PRIVATE_existingParents","__PRIVATE_added","__PRIVATE_lastId","__PRIVATE_TargetIdGenerator","__PRIVATE_isIndexedDbTransactionError","__PRIVATE_asyncQueue","__PRIVATE_targetTimeMs","__PRIVATE_removalCallback","__PRIVATE_Deferred","__PRIVATE_deferred","bind","catch","__PRIVATE_delayMs","__PRIVATE_delayedOp","__PRIVATE_DelayedOperation","__PRIVATE_timerHandle","setTimeout","__PRIVATE_handleDelayElapsed","reason","clearTimeout","__PRIVATE_enqueueAndForget","__PRIVATE_ExponentialBackoff","__PRIVATE_backoff","__PRIVATE_skipBackoff","window","addEventListener","__PRIVATE_visibilityHandler","Mr","__PRIVATE__isShuttingDown","enqueue","__PRIVATE_verifyNotFailed","__PRIVATE_enqueueInternal","removeEventListener","__PRIVATE_enqueueEvenAfterShutdown","__PRIVATE_retryableTail","__PRIVATE_retryingOp","async","__PRIVATE_backoffAndRun","__PRIVATE_newTail","__PRIVATE_tail","__PRIVATE_operationInProgress","stack","__PRIVATE_timerIdsToSkip","__PRIVATE_createAndSchedule","__PRIVATE_removedOp","__PRIVATE_removeDelayedOperation","__PRIVATE_delayedOperations","__PRIVATE_currentTail","__PRIVATE_lastTimerId","__PRIVATE_drain","a","b","__PRIVATE_wrapInUserErrorIfRecoverable","__PRIVATE_cacheSizeCollectionThreshold","__PRIVATE_percentileToCollect","__PRIVATE_maximumSequenceNumbersToCollect","__PRIVATE_cacheSize","__PRIVATE_LruParams","__PRIVATE_DEFAULT_COLLECTION_PERCENTILE","__PRIVATE_DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT","__PRIVATE_DEFAULT_CACHE_SIZE_BYTES","__PRIVATE_COLLECTION_DISABLED","persistence","__PRIVATE_queryEngine","__PRIVATE_initialUser","__PRIVATE_ObjectMap","t","__PRIVATE_getMutationQueue","__PRIVATE_remoteDocuments","__PRIVATE_getRemoteDocumentCache","__PRIVATE_targetCache","__PRIVATE_getTargetCache","__PRIVATE_localDocuments","__PRIVATE_LocalDocumentsView","__PRIVATE_getIndexManager","__PRIVATE_setLocalDocumentsView","__PRIVATE_newMutationQueue","__PRIVATE_newLocalDocuments","runTransaction","txn","__PRIVATE_oldBatches","__PRIVATE_getAllMutationBatches","__PRIVATE_promisedOldBatches","__PRIVATE_newBatches","__PRIVATE_removedBatchIds","__PRIVATE_addedBatchIds","__PRIVATE_changedKeys","__PRIVATE_getDocuments","__PRIVATE_affectedDocuments","Vh","ph","yh","__PRIVATE_existingDocs","__PRIVATE_extractBaseValue","__PRIVATE_addMutationBatch","__PRIVATE_applyToLocalDocumentSet","vh","__PRIVAT
E_affected","__PRIVATE_documentBuffer","__PRIVATE_newChangeBuffer","Dh","__PRIVATE_acknowledgeBatch","__PRIVATE_applyWriteToRemoteDocuments","__PRIVATE_performConsistencyCheck","__PRIVATE_affectedKeys","__PRIVATE_lookupMutationBatch","__PRIVATE_removeMutationBatch","__PRIVATE_getHighestUnacknowledgedBatchId","__PRIVATE_getLastStreamToken","__PRIVATE_setLastStreamToken","__PRIVATE_getLastRemoteSnapshotVersion","__PRIVATE_remoteVersion","__PRIVATE_newTargetDataByTargetMap","__PRIVATE_targetDataByTarget","__PRIVATE_oldTargetData","__PRIVATE_removeMatchingKeys","__PRIVATE_addMatchingKeys","__PRIVATE_newTargetData","__PRIVATE_withResumeToken","__PRIVATE_withSequenceNumber","__PRIVATE_currentSequenceNumber","__PRIVATE_LocalStore","__PRIVATE_shouldPersistTargetData","__PRIVATE_updateTargetData","__PRIVATE_changedDocs","__PRIVATE_updatedKeys","__PRIVATE_existingDoc","__PRIVATE_removeEntry","__PRIVATE_addEntry","__PRIVATE_referenceDelegate","__PRIVATE_updateLimboDocument","__PRIVATE_updateRemoteVersion","lastRemoteSnapshotVersion","__PRIVATE_setTargetsMetadata","__PRIVATE_toMicroseconds","__PRIVATE_RESUME_TOKEN_MAX_AGE_MICROS","__PRIVATE_viewChanges","__PRIVATE_viewChange","__PRIVATE_addReference","__PRIVATE_removeReference","e_42","__PRIVATE_updatedTargetData","__PRIVATE_withLastLimboFreeSnapshotVersion","__PRIVATE_afterBatchId","__PRIVATE_getNextMutationBatchAfterBatchId","__PRIVATE_getTargetData","__PRIVATE_cached","__PRIVATE_allocateTargetId","__PRIVATE_addTargetData","__PRIVATE_targetIdByTarget","__PRIVATE_keepPersistedTargetData","mode","__PRIVATE_usePreviousResults","__PRIVATE_remoteKeys","__PRIVATE_getMatchingKeysForTargetId","_o","__PRIVATE_docKeys","__PRIVATE_promiseChain","__PRIVATE_remoteDoc","__PRIVATE_ackVersion","__PRIVATE_garbageCollector","__PRIVATE_collect","__PRIVATE_ignoreIfPrimaryLeaseLoss","__PRIVATE_DocReference","__PRIVATE_compareByKey","__PRIVATE_compareByTargetId","__PRIVATE_refsByKey","ref","__PRIVATE_refsByTarget","__PRIVATE_removeRef","__PRIVATE_emptyKey","__PRIVATE_startRef","__PRIVATE_endRef","__PRIVATE_forEachInRange","__PRIVATE_firstRef","__PRIVATE_firstAfterOrEqual","__PRIVATE_targetOrBatchId","__PRIVATE_validateNoArgs","functionName","__PRIVATE_formatPlural","__PRIVATE_validateExactNumberOfArgs","__PRIVATE_numberOfArgs","__PRIVATE_validateAtLeastNumberOfArgs","__PRIVATE_minNumberOfArgs","__PRIVATE_validateBetweenNumberOfArgs","__PRIVATE_maxNumberOfArgs","__PRIVATE_validateArgType","__PRIVATE_argument","__PRIVATE_validateType","__PRIVATE_ordinal","__PRIVATE_validateOptionalArgType","__PRIVATE_validateNamedType","__PRIVATE_optionName","__PRIVATE_validateNamedOptionalType","__PRIVATE_validateNamedOptionalPropertyEquals","__PRIVATE_inputName","input","__PRIVATE_expected","__PRIVATE_expectedDescription","__PRIVATE_valueDescription","__PRIVATE_actualDescription","__PRIVATE_isPlainObject","description","getPrototypeOf","substring","JSON","stringify","Array","__PRIVATE_customObjectName","__PRIVATE_validateDefined","__PRIVATE_validateOptionNames","__PRIVATE_optionNames","__PRIVATE_invalidClassError","__PRIVATE_validatePositiveNumber","num","__PRIVATE_assertUint8ArrayAvailable","__PRIVATE_assertBase64Available","__PRIVATE_base64Available","__PRIVATE_byteString","__PRIVATE__byteString","arguments","Blob","__PRIVATE_minNumberOfElements","fieldNames","__PRIVATE__internalPath","__PRIVATE_InternalFieldPath","__PRIVATE__DOCUMENT_ID","__PRIVATE_RESERVED","__PRIVATE__methodName","__PRIVATE_FieldValueImpl","context","__PRIVATE_dataSource","__PRIVATE_createError","__PRIVATE_DeleteFiel
dValueImpl","__PRIVATE_ServerTimestampFieldValueImpl","__PRIVATE__elements","__PRIVATE_parseContext","__PRIVATE_ParseContext","No","methodName","Lo","ignoreUndefinedProperties","__PRIVATE_parsedElements","__PRIVATE_parseData","arrayUnion","__PRIVATE__operand","__PRIVATE_numericIncrement","isFinite","__PRIVATE__lat","__PRIVATE__long","__PRIVATE_RESERVED_FIELD_REGEX","__PRIVATE_isWrite","settings","__PRIVATE_validatePath","configuration","__PRIVATE_childPath","__PRIVATE_contextWith","__PRIVATE_validatePathSegment","__PRIVATE_fieldDescription","__PRIVATE_newSerializer","__PRIVATE_createContext","__PRIVATE_validatePlainObject","__PRIVATE_updateData","__PRIVATE_parseObject","__PRIVATE_ParsedSetData","__PRIVATE_validatedFieldPaths","__PRIVATE_stringOrFieldPath","__PRIVATE_ExternalFieldPath","__PRIVATE_fieldPathFromDotSeparatedString","contains","__PRIVATE_fieldMaskContains","__PRIVATE_covers","__PRIVATE_fieldMaskPaths","__PRIVATE_childContext","__PRIVATE_childContextForFieldPath","__PRIVATE_parsedValue","mask","__PRIVATE_ParsedUpdateData","moreFieldsAndValues","__PRIVATE_fieldPathFromArgument","__PRIVATE_allowArrays","__PRIVATE_looksLikeJsonObject","__PRIVATE_arrayElement","__PRIVATE_entryIndex","__PRIVATE_parsedEntry","__PRIVATE_childContextForArray","__PRIVATE_toNumber","fromDate","GeoPoint","DocumentReference","__PRIVATE_thisDb","__PRIVATE_otherDb","firestore","__PRIVATE__databaseId","__PRIVATE__key","__PRIVATE_childContextForField","search","__PRIVATE_connectionTimerId","__PRIVATE_idleTimerId","__PRIVATE_connection","__PRIVATE_credentialsProvider","listener","__PRIVATE_performBackoff","__PRIVATE_isStarted","close","__PRIVATE_isOpen","__PRIVATE_idleTimer","__PRIVATE_handleIdleCloseTimer","__PRIVATE_cancelIdleCheck","stream","send","__PRIVATE_finalState","__PRIVATE_closeCount","__PRIVATE_resetToMax","__PRIVATE_invalidateToken","__PRIVATE_tearDown","__PRIVATE_onClose","__PRIVATE_dispatchIfNotClosed","__PRIVATE_getCloseGuardedDispatcher","token","__PRIVATE_startStream","__PRIVATE_rpcError","__PRIVATE_handleStreamClose","__PRIVATE_startRpc","__PRIVATE_onOpen","onMessage","__PRIVATE_startCloseCount","credentials","__PRIVATE_PersistentStream","__PRIVATE_openStream","__PRIVATE_watchChangeProto","__PRIVATE_fromWatchChange","snapshot","__PRIVATE_versionFromListenResponse","__PRIVATE_onWatchChange","request","__PRIVATE_encodedDatabaseId","addTarget","labels","__PRIVATE_toListenRequestLabels","__PRIVATE_sendRequest","ba","__PRIVATE_handshakeComplete_","__PRIVATE_writeMutations","__PRIVATE_responseProto","lastStreamToken","__PRIVATE_fromWriteResults","writeResults","__PRIVATE_onMutationResult","__PRIVATE_onHandshakeComplete","writes","__PRIVATE_toMutation","__PRIVATE_rpcName","__PRIVATE_invokeRPC","__PRIVATE_invokeStreamingRPC","__PRIVATE_datastore","Set","__PRIVATE_ensureCommitNotCalled","__PRIVATE_datastoreImpl","__PRIVATE_params","response","__PRIVATE_fromMaybeDocument","__PRIVATE_recordVersion","write","__PRIVATE_toMutations","__PRIVATE_writtenDocs","__PRIVATE_preconditionForUpdate","__PRIVATE_lastWriteError","__PRIVATE_unwritten","__PRIVATE_readVersions","__PRIVATE__version","__PRIVATE_committed","__PRIVATE_docVersion","__PRIVATE_existingVersion","__PRIVATE_onlineStateHandler","__PRIVATE_watchStreamFailures","__PRIVATE_setAndBroadcast","__PRIVATE_onlineStateTimer","__PRIVATE_logClientOfflineWarningIfNecessary","__PRIVATE_clearOnlineStateTimer","__PRIVATE_newState","__PRIVATE_shouldWarnClientIsOffline","details","__PRIVATE_localStore","__PRIVATE_connectivityMonitor","__PRIVATE_addCallback","__PRIVATE_
canUseNetwork","__PRIVATE_restartNetwork","__PRIVATE_onlineStateTracker","__PRIVATE_OnlineStateTracker","__PRIVATE_watchStream","__PRIVATE_PersistentListenStream","Aa","__PRIVATE_onWatchStreamOpen","Ta","__PRIVATE_onWatchStreamClose","Va","__PRIVATE_onWatchStreamChange","__PRIVATE_writeStream","__PRIVATE_PersistentWriteStream","__PRIVATE_onWriteStreamOpen","__PRIVATE_onWriteStreamClose","Ca","__PRIVATE_onWriteHandshakeComplete","Sa","enableNetwork","networkEnabled","__PRIVATE_enableNetworkInternal","__PRIVATE_shouldStartWatchStream","__PRIVATE_startWatchStream","__PRIVATE_fillWritePipeline","__PRIVATE_disableNetworkInternal","stop","__PRIVATE_writePipeline","__PRIVATE_cleanUpWatchStreamState","__PRIVATE_shutdown","__PRIVATE_listenTargets","__PRIVATE_sendWatchRequest","__PRIVATE_sendUnwatchRequest","__PRIVATE_markIdle","__PRIVATE_syncEngine","__PRIVATE_watchChangeAggregator","__PRIVATE_watch","__PRIVATE_unwatch","__PRIVATE_WatchChangeAggregator","__PRIVATE_handleWatchStreamStart","__PRIVATE_indexedDbFailed","isPrimary","__PRIVATE_handleWatchStreamFailure","__PRIVATE_handleTargetError","__PRIVATE_disableNetworkUntilRecovery","__PRIVATE_handleDocumentChange","__PRIVATE_handleExistenceFilter","__PRIVATE_handleTargetChange","__PRIVATE_raiseWatchSnapshot","__PRIVATE_enqueueRetryable","__PRIVATE_createRemoteEvent","__PRIVATE_requestTargetData","__PRIVATE_applyRemoteEvent","s_49","__PRIVATE_rejectListen","__PRIVATE_canAddToWritePipeline","__PRIVATE_lastBatchIdRetrieved","__PRIVATE_nextMutationBatch","__PRIVATE_addToWritePipeline","__PRIVATE_shouldStartWriteStream","__PRIVATE_startWriteStream","__PRIVATE_handshakeComplete","__PRIVATE_writeHandshake","shift","__PRIVATE_success","__PRIVATE_applySuccessfulWrite","__PRIVATE_handleWriteError","__PRIVATE_handleHandshakeError","__PRIVATE_inhibitBackoff","__PRIVATE_rejectFailedWrite","Transaction","activeTargetIds","updateTimeMs","__PRIVATE_LocalClientState","__PRIVATE_localState","__PRIVATE_addQueryTarget","__PRIVATE_queryState","__PRIVATE_removeQueryTarget","onlineState","__PRIVATE__syncedDocuments","__PRIVATE_documentSet","__PRIVATE_docComparator","uc","__PRIVATE_previousChanges","__PRIVATE_changeSet","__PRIVATE_DocumentChangeSet","__PRIVATE_oldDocumentSet","__PRIVATE_newMutatedKeys","__PRIVATE_newDocumentSet","__PRIVATE_needsRefill","__PRIVATE_lastDocInLimit","__PRIVATE_hasLimitToFirst","last","__PRIVATE_firstDocInLimit","__PRIVATE_hasLimitToLast","__PRIVATE_newMaybeDoc","__PRIVATE_oldDoc","__PRIVATE_oldDocHadPendingMutations","__PRIVATE_newDocHasPendingMutations","__PRIVATE_changeApplied","track","__PRIVATE_shouldWaitForSyncedDocument","ac","lc","fc","ns","__PRIVATE_updateLimboDocuments","__PRIVATE_getChanges","__PRIVATE_c1","__PRIVATE_c2","__PRIVATE_applyTargetChange","__PRIVATE_limboChanges","__PRIVATE_newSyncState","__PRIVATE_limboDocuments","__PRIVATE_syncState","Ec","__PRIVATE_applyChanges","__PRIVATE_oldLimboDocuments","__PRIVATE_shouldBeInLimbo","__PRIVATE_RemovedLimboDocument","__PRIVATE_AddedLimboDocument","__PRIVATE_queryResult","__PRIVATE_computeDocChanges","__PRIVATE_fromInitialDocuments","__PRIVATE_remoteStore","updateFunction","__PRIVATE_runWithBackOff","__PRIVATE_createTransaction","__PRIVATE_userPromise","__PRIVATE_tryRunUpdateFunction","commit","__PRIVATE_commitError","__PRIVATE_handleTransactionError","__PRIVATE_userPromiseError","__PRIVATE_retries","__PRIVATE_isRetryableTransactionError","view","__PRIVATE_sharedClientState","__PRIVATE_maxConcurrentLimboResolutions","q","__PRIVATE_ReferenceSet","__PRIVATE_forSyncEngine","Uc","__PRIVAT
E_syncEngineListener","__PRIVATE_assertSubscribed","__PRIVATE_queryView","__PRIVATE_queryViewsByQuery","__PRIVATE_addLocalQueryTarget","__PRIVATE_computeInitialSnapshot","__PRIVATE_allocateTarget","__PRIVATE_initializeViewAndComputeSnapshot","__PRIVATE_isPrimaryClient","listen","__PRIVATE_executeQuery","__PRIVATE_View","__PRIVATE_viewDocChanges","__PRIVATE_synthesizedTargetChange","__PRIVATE_updateTrackedLimbos","__PRIVATE_QueryView","__PRIVATE_queriesByTarget","__PRIVATE_queries","__PRIVATE_removeLocalQueryTarget","__PRIVATE_isActiveQueryTarget","__PRIVATE_releaseTarget","__PRIVATE_clearQueryState","__PRIVATE_unlisten","__PRIVATE_removeAndCleanupTarget","__PRIVATE_userCallback","__PRIVATE_localWrite","__PRIVATE_addPendingMutation","__PRIVATE_addMutationCallback","__PRIVATE_emitNewSnapsAndNotifyLocalStore","__PRIVATE_TransactionRunner","__PRIVATE_run","__PRIVATE_limboResolution","__PRIVATE_activeLimboResolutionsByTarget","__PRIVATE_receivedDocument","source","__PRIVATE_newViewSnapshots","__PRIVATE_applyOnlineStateChange","__PRIVATE_onOnlineStateChange","__PRIVATE_updateQueryState","__PRIVATE_limboKey","event","__PRIVATE_activeLimboTargetsByKey","__PRIVATE_pumpEnqueuedLimboResolutions","__PRIVATE_mutationBatchResult","__PRIVATE_processUserCallback","__PRIVATE_triggerPendingWritesCallbacks","__PRIVATE_updateMutationState","__PRIVATE_rejectBatch","__PRIVATE_highestBatchId","__PRIVATE_callbacks","__PRIVATE_pendingWritesCallbacks","__PRIVATE_firestoreError","__PRIVATE_errorMessage","clear","__PRIVATE_newCallbacks","__PRIVATE_mutationUserCallbacks","__PRIVATE_toKey","__PRIVATE_onWatchError","__PRIVATE_limboDocumentRefs","__PRIVATE_removeReferencesForId","__PRIVATE_containsKey","__PRIVATE_removeLimboTarget","__PRIVATE_limboTargetId","__PRIVATE_limboChange","__PRIVATE_trackLimboChange","__PRIVATE_enqueuedLimboResolutions","__PRIVATE_limboTargetIdGenerator","__PRIVATE_LimboResolution","__PRIVATE_INVALID","__PRIVATE_newSnaps","__PRIVATE_docChangesInAllViews","__PRIVATE_queriesProcessed","__PRIVATE_fromSnapshot","__PRIVATE_notifyLocalViewChanges","__PRIVATE_fnName","__PRIVATE_handleUserChange","__PRIVATE_rejectOutstandingPendingWritesCallbacks","__PRIVATE_handleCredentialChange","disableNetwork","__PRIVATE_keySet","__PRIVATE_unionWith","__PRIVATE_syncedDocuments","subscribe","__PRIVATE_firstListen","__PRIVATE_queryInfo","__PRIVATE_QueryListenersInfo","__PRIVATE_viewSnap","onError","__PRIVATE_listeners","__PRIVATE_onViewSnapshot","__PRIVATE_raiseSnapshotsInSyncEvent","__PRIVATE_lastListen","__PRIVATE_viewSnaps","__PRIVATE_raisedEvent","observer","__PRIVATE_snapshotsInSyncListeners","__PRIVATE_queryObserver","__PRIVATE_snap","includeMetadataChanges","__PRIVATE_raisedInitialEvent","__PRIVATE_shouldRaiseEvent","__PRIVATE_shouldRaiseInitialEvent","__PRIVATE_raiseInitialEvent","__PRIVATE_maybeOnline","__PRIVATE_waitForSyncWhenOnline","__PRIVATE_hasPendingWritesChanged","__PRIVATE_localDocumentsView","__PRIVATE_matchesAllDocuments","__PRIVATE_executeFullCollectionScan","__PRIVATE_previousResults","__PRIVATE_applyQuery","__PRIVATE_updatedResults","__PRIVATE_sortedPreviousResults","__PRIVATE_limboFreeSnapshotVersion","__PRIVATE_docAtLimitEdge","__PRIVATE_batchIndex","__PRIVATE_indexOfExistingBatchId","__PRIVATE_nextBatchId","__PRIVATE_MutationBatch","__PRIVATE_batchesByDocumentKey","__PRIVATE_addToCollectionParentIndex","__PRIVATE_findMutationBatch","__PRIVATE_rawIndex","__PRIVATE_indexOfBatchId","__PRIVATE_documentKey","POSITIVE_INFINITY","__PRIVATE_documentKeys","__PRIVATE_uniqueBatchIDs","__PRIVATE_findMuta
tionBatches","prefix","__PRIVATE_immediateChildrenPathLength","__PRIVATE_startPath","__PRIVATE_rowKeyPath","__PRIVATE_batchIDs","__PRIVATE_references","__PRIVATE_markPotentiallyOrphaned","__PRIVATE_sizer","__PRIVATE_entry","__PRIVATE_previousSize","__PRIVATE_currentSize","$l","__PRIVATE_maybeDocument","iterator","__PRIVATE_MemoryRemoteDocumentCache","__PRIVATE_RemoteDocumentChangeBuffer","__PRIVATE_documentCache","__PRIVATE__readTime","__PRIVATE_assertNotApplied","__PRIVATE_bufferedEntry","__PRIVATE_getFromCache","__PRIVATE_getAllFromCache","__PRIVATE_changesApplied","__PRIVATE_forTargetCache","__PRIVATE_highestSequenceNumber","highestTargetId","__PRIVATE_targetIdGenerator","highestListenSequenceNumber","__PRIVATE_saveTargetData","targetCount","upperBound","__PRIVATE_removals","__PRIVATE_removeMatchingKeysForTargetId","__PRIVATE_addReferences","__PRIVATE_removeReferences","__PRIVATE_matchingKeys","__PRIVATE_referencesForId","__PRIVATE_referenceDelegateFactory","__PRIVATE__started","__PRIVATE_MemoryTargetCache","__PRIVATE_MemoryIndexManager","__PRIVATE_documentSize","n_","__PRIVATE_mutationQueues","__PRIVATE_MemoryMutationQueue","__PRIVATE_transactionOperation","__PRIVATE_MemoryTransaction","__PRIVATE_listenSequence","__PRIVATE_onTransactionStarted","__PRIVATE_onTransactionCommitted","__PRIVATE_toPromise","__PRIVATE_raiseOnCommittedEvent","__PRIVATE_or","__PRIVATE_onCommittedListeners","__PRIVATE_MemoryEagerDelegate","w_","__PRIVATE__orphanedDocuments","__PRIVATE_localViewReferences","__PRIVATE_orphanedDocuments","cache","__PRIVATE_removeTargetData","__PRIVATE_changeBuffer","__PRIVATE_isReferenced","__PRIVATE_mutationQueuesContainKey","__PRIVATE_cfg","__PRIVATE_createSharedClientState","__PRIVATE_createPersistence","__PRIVATE_gcScheduler","__PRIVATE_createGarbageCollectionScheduler","__PRIVATE_createLocalStore","__PRIVATE_createRemoteStore","__PRIVATE_createSyncEngine","__PRIVATE_eventManager","__PRIVATE_createEventManager","__PRIVATE_applyPrimaryState","__PRIVATE_EventManager","__PRIVATE_IndexFreeQueryEngine","__PRIVATE_MemoryPersistence","__PRIVATE_factory","__PRIVATE_RemoteStore","__PRIVATE_newConnectivityMonitor","__PRIVATE_MemorySharedClientState","__PRIVATE_SyncEngine","__PRIVATE_databaseInfo","__PRIVATE_AutoId","__PRIVATE_newId","__PRIVATE_componentProvider","__PRIVATE_persistenceSettings","__PRIVATE_verifyNotTerminated","__PRIVATE_initializationDone","__PRIVATE_persistenceResult","__PRIVATE_initialized","__PRIVATE_setChangeListener","__PRIVATE_initializeComponents","__PRIVATE_loadConnection","__PRIVATE_DatastoreImpl","initialize","Pr","v_","ka","clientId","g_","Dc","F_","__PRIVATE_eventMgr","__PRIVATE_setDatabaseDeletedListener","terminate","__PRIVATE_canFallback","console","warn","__PRIVATE_MemoryComponentProvider","L_","DOMException","__PRIVATE_isShuttingDown","__PRIVATE_verifyOperationInProgress","__PRIVATE_enqueueAndInitiateShutdown","__PRIVATE_removeChangeListener","__PRIVATE_registerPendingWritesCallback","__PRIVATE_QueryListener","__PRIVATE_clientTerminated","__PRIVATE_readDocument","__PRIVATE_addSnapshotsInSyncListener","__PRIVATE_removeSnapshotsInSyncListener","O_","__PRIVATE_scheduleEvent","muted","eventHandler","__PRIVATE_isPartialObserver","__PRIVATE_methods","object","method","timestampsInSnapshots","__PRIVATE_serverTimestampBehavior","converter","__PRIVATE_convertTimestamp","__PRIVATE_convertServerTimestamp","__PRIVATE_convertReference","__PRIVATE_convertGeoPoint","__PRIVATE_convertArray","__PRIVATE_convertObject","__PRIVATE_convertValue","__PRIVATE_getPreviousValue","_
_PRIVATE_normalizedValue","toDate","__PRIVATE_resourcePath","CACHE_SIZE_UNLIMITED","cacheSizeBytes","__PRIVATE_MINIMUM_CACHE_SIZE_BYTES","experimentalForceLongPolling","__PRIVATE_databaseIdOrApp","__PRIVATE_AsyncQueue","__PRIVATE_ensureClientConfigured","__PRIVATE__firestoreClient","app","__PRIVATE__firebaseApp","Firestore","__PRIVATE_databaseIdFromApp","__PRIVATE__persistenceKey","__PRIVATE__credentials","__PRIVATE_FirebaseCredentialsProvider","external","__PRIVATE_EmptyCredentialsProvider","__PRIVATE__componentProvider","__PRIVATE__settings","__PRIVATE_FirestoreSettings","rf","__PRIVATE__userDataReader","__PRIVATE_UserDataReader","__PRIVATE_settingsLiteral","__PRIVATE_newSettings","__PRIVATE_client","getAuthHeaderValueForFirstParty","__PRIVATE_FirstPartyCredentialsProvider","synchronizeTabs","experimentalTabSynchronization","__PRIVATE_configureClient","__PRIVATE__queue","__PRIVATE_enqueueAndForgetEvenAfterShutdown","__PRIVATE_makeDatabaseInfo","clearPersistence","_removeServiceInstance","lf","waitForPendingWrites","arg","__PRIVATE_onSnapshotsInSyncInternal","__PRIVATE_asyncObserver","__PRIVATE_AsyncObserver","__PRIVATE_mute","__PRIVATE_DatabaseInfo","__PRIVATE_FirestoreClient","__PRIVATE_pathString","CollectionReference","__PRIVATE_forPath","__PRIVATE_InternalQuery","WriteBatch","SILENT","level","__PRIVATE__firestore","__PRIVATE__transaction","documentRef","__PRIVATE_validateReference","__PRIVATE_lookup","DocumentSnapshot","__PRIVATE__converter","__PRIVATE_validateSetOptions","__PRIVATE_convertedValue","__PRIVATE_parsed","merge","mergeFields","__PRIVATE__dataReader","__PRIVATE_parseMergeData","__PRIVATE_parseSetData","__PRIVATE_fieldOrUpdateData","__PRIVATE_parseUpdateVarargs","__PRIVATE_parseUpdateData","__PRIVATE_verifyNotCommitted","__PRIVATE__mutations","__PRIVATE__committed","__PRIVATE_currArg","__PRIVATE_internalOptions","complete","__PRIVATE_onSnapshotInternal","__PRIVATE_errHandler","__PRIVATE_internalListener","__PRIVATE_validateGetOptions","__PRIVATE_getDocumentFromLocalCache","__PRIVATE_getViaSnapshotListener","Rl","metadata","__PRIVATE__document","__PRIVATE__fromCache","__PRIVATE__hasPendingWrites","__PRIVATE_validateSnapshotOptions","QueryDocumentSnapshot","fromFirestore","__PRIVATE_UserDataWriter","__PRIVATE__areTimestampsInSnapshotsEnabled","serverTimestamps","SnapshotMetadata","__PRIVATE__query","opStr","__PRIVATE_fieldValue","__PRIVATE_enums","__PRIVATE_validateDisjunctiveFilterElements","__PRIVATE_referenceList","__PRIVATE_parseDocumentIdValue","__PRIVATE_parseQueryValue","__PRIVATE_validateNewFilter","__PRIVATE_addFilter","directionStr","__PRIVATE_validateNewOrderBy","__PRIVATE_addOrderBy","__PRIVATE_withLimitToFirst","__PRIVATE_withLimitToLast","__PRIVATE_docOrField","__PRIVATE_boundFromDocOrFields","__PRIVATE_withStartAt","__PRIVATE_withEndAt","__PRIVATE_boundFromDocument","Vf","__PRIVATE_allFields","__PRIVATE_boundFromFields","components","__PRIVATE_rawValue","__PRIVATE_wrapped","__PRIVATE_validateHasExplicitOrderByForLimitToLast","QuerySnapshot","__PRIVATE_firestoreClient","__PRIVATE_getDocumentsFromLocalCache","__PRIVATE_documentIdValue","operator","__PRIVATE_arrayOps","__PRIVATE_disjunctiveOps","__PRIVATE_isArrayOp","__PRIVATE_isDisjunctiveOp","__PRIVATE_existingField","__PRIVATE_validateOrderByAndInequalityMatch","__PRIVATE_conflictingOp","__PRIVATE_findFilterOperator","__PRIVATE_inequality","__PRIVATE__originalQuery","__PRIVATE__snapshot","thisArg","__PRIVATE_convertToDocumentImpl","__PRIVATE__cachedChanges","__PRIVATE__cachedChangesIncludeMetadataChanges","oldI
ndex","newIndex","__PRIVATE_indexTracker","__PRIVATE_resultChangeType","__PRIVATE__path","toFirestore","__PRIVATE_docRef","__PRIVATE_typeDescription","__PRIVATE_validator","__PRIVATE_validateOptionalArrayElements","__PRIVATE_applyFirestoreDataConverter","__PRIVATE_makeConstructorPrivate","__PRIVATE_cls","__PRIVATE_optionalMessage","__PRIVATE_PublicConstructor","assign","__PRIVATE_PublicFirestore","__PRIVATE_PublicTransaction","__PRIVATE_PublicWriteBatch","__PRIVATE_PublicDocumentReference","__PRIVATE_PublicDocumentSnapshot","__PRIVATE_PublicQueryDocumentSnapshot","__PRIVATE_PublicQuery","__PRIVATE_PublicQuerySnapshot","__PRIVATE_PublicCollectionReference","__PRIVATE_PublicFieldValue","__PRIVATE_ArrayUnionFieldValueImpl","__PRIVATE_ArrayRemoveFieldValueImpl","__PRIVATE_NumericIncrementFieldValueImpl","__PRIVATE_PublicBlob","__PRIVATE_firestoreNamespace","FieldValue","__PRIVATE_onNetworkAvailable","__PRIVATE_onNetworkUnavailable","__PRIVATE_configureNetworkMonitoring","__PRIVATE_networkAvailableListener","__PRIVATE_networkUnavailableListener","__PRIVATE_sendFn","__PRIVATE_closeFn","__PRIVATE_wrappedOnOpen","__PRIVATE_wrappedOnClose","__PRIVATE_wrappedOnMessage","__PRIVATE_RPC_NAME_REST_MAPPING","BatchGetDocuments","Commit","__PRIVATE_X_GOOG_API_CLIENT_VALUE","info","__PRIVATE_baseUrl","__PRIVATE_header","url","__PRIVATE_makeUrl","__PRIVATE_xhr","XhrIo","listenOnce","EventType","COMPLETE","getLastErrorCode","ErrorCode","NO_ERROR","json","getResponseJson","TIMEOUT","HTTP_ERROR","getStatus","getResponseText","__PRIVATE_responseError","__PRIVATE_firestoreErrorCode","__PRIVATE_serverError","toLowerCase","__PRIVATE_jsonObj","__PRIVATE_requestString","Content-Type","__PRIVATE_modifyHeadersForRequest","__PRIVATE_urlParts","__PRIVATE_webchannelTransport","createWebChannelTransport","httpSessionIdParam","initMessageHeaders","messageUrlParams","sendRawJson","supportsCrossDomainXhr","internalChannelParams","forwardChannelRequestTimeoutMs","isMobileCordova","isReactNative","isElectron","isIE","isUWP","isBrowserExtension","httpHeadersOverwriteParam","channel","createWebChannel","__PRIVATE_opened","closed","__PRIVATE_streamBridge","__PRIVATE_StreamBridge","zf","open","Yf","__PRIVATE_unguardedEventListen","param","WebChannel","OPEN","CLOSE","__PRIVATE_callOnClose","MESSAGE","__PRIVATE_msgData","__PRIVATE_msgDataOrError","__PRIVATE_callOnMessage","__PRIVATE_callOnOpen","__PRIVATE_urlRpcName","__PRIVATE_registerFirestore","__PRIVATE_firestoreFactory","registerComponent","Component","container","getProvider","setServiceProps","registerVersion","__PRIVATE_setPlatform","__PRIVATE_WebChannelConnection","__PRIVATE_BrowserConnectivityMonitor","__PRIVATE_isAvailable","__PRIVATE_NoopConnectivityMonitor","__PRIVATE_JsonProtoSerializer","hi","__PRIVATE_encoded","raw","__PRIVATE_nBytes","crypto","self","msCrypto","getRandomValues"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;2DAoBaA,IAAcC,EAASD;ICUlCE,WAAqBC;QAAAC,WAAAD;;WAErBD,gBAAAA;QACE,OAAmB,QAAZE,KAAKD;;;;;;IAOdD,gBAAAA;QACE,OAAIE,KAAKC,MACA,SAASD,KAAKD,MAEd;OAIXD,sBAAAA,SAAQI;QACN,OAAOA,EAAUH,QAAQC,KAAKD;;;;;;;;;;;;;;;;;;;;;;;;mDA1BE,IAAII,EAAK;;;AAI3CA,MAAqC,IAAIA,EAAK,2BAC9CA,MAA8B,IAAIA,EAAK;;;;;;;;;;;;;;;;;;ACHlC,IAAMC,IAAO;;;;IAIlBC,IAAI;;IAGJC,WAAW;;IAGXC,SAAS;;;;;;;IAQTC,kBAAkB;;;;;;;;IASlBC,mBAAmB;;IAGnBC,WAAW;;;;;IAMXC,gBAAgB;;;;;;;;IAShBC,mBAAmB;;;;;IAMnBC,iBAAiB;;;;;IAMjBC,oBAAoB;;;;;;;;;;;;;;;;;;;;;IAsBpBC,qBAAqB;;;;;;;;IASrBC,SAAS;;;;;;;;;;;;;;;;IAiBTC,cAAc;;IAGdC,eAAe;;;;;IAMfC,UAAU;;;;;;;;IASVC,aAAa;;IAGbC,WAAW;;IAaXvB,WAAqBwB,GAAqBC;QAA1CzB;gBACE0B,IAAA
A,aAAMD,mBADaD,GAAqBtB,YAAAuB,GAH1CvB,SAAO;;;;QASLA,EAAKyB,WAAW;YAAM,OAAGzB,EAAK0B,oBAAe1B,EAAKsB,eAAUtB,EAAKuB;;;WAVjCI;EAAAA,YCjGlC7B,SAAY8B,GAAsBC;IAAA7B,YAAA6B,GAFlC7B,YAAO,SAGLA,KAAK8B,IAAc;;IAEnB9B,KAAK8B,EAA2BC,gBAAI,YAAUH;;IAqClD9B;;;;;;QAMEE,SAA0D;;WAE1DF,uBAAAA;QACE,OAAOkC,QAAQC,QAAsB;OAGvCnC,gBAAAA,eAEAA,gBAAAA,SAAkBoC;QAKhBlC,KAAKkC,IAAiBA;;QAEtBA,EAAe/B,EAAKU;OAGtBf,gBAAAA;QAKEE,KAAKkC,IAAiB;;;IA4BxBpC,WAAYqC;QAAZrC;;;;mBAnBAE,SAAiE;;QAGzDA,mBAAoBG,EAAKU,iBACjCb;;;;;QAMAA,SAAuB;;QAGvBA,SAA0D,MAElDA,wBAKNA,KAAKoC,IAAgB;YACnBpC,EAAKqC,KACLrC,EAAKsC,cAActC,EAAKuC,KACxBvC,EAAKwC,QACDxC,EAAKkC,KACPlC,EAAKkC,EAAelC,EAAKsC;WAI7BtC,KAAKqC,IAAe,GAEpBrC,KAAKyC,OAAON,EAAaO,aAAa;YAAEC;YAEpC3C,KAAKyC,OACPzC,KAAKyC,KAAKG,qBAAqB5C,KAAmB6C;;QAGlD7C,KAAKoC,EAAc,OACnBD,EAAaW,MAAMC,MACjBN,SAAAA;YACEzC,EAAKyC,OAAOA,GACRzC,EAAKoC;;YAEPpC,EAAKyC,KAAKG,qBAAqB5C,EAAKoC;aAGxC;;WAONtC,uBAAAA;QAAAA,cASQkD,IAAsBhD,KAAKqC,GAC3BY,IAAejD,KAAKiD;;;;gBAG1B,OAFAjD,KAAKiD,mBAEAjD,KAAKyC,OAIHzC,KAAKyC,KAAKS,SAASD,GAAcF,MAAKI,SAAAA;;;;YAI3C,IAAInD,EAAKqC,MAAiBW,GACxB,MAAM,IAAII,EACRhD,EAAKY,SACL;YAGF,OAAImC,KACFE,GACmC,mBAA1BF,EAAUG,cAGZ,IAAIC,EAAWJ,EAAUG,aAAatD,EAAKsC,gBAE3C;cApBJN,QAAQC,QAAQ;OA0B3BnC,gBAAAA;QACEE,KAAKiD;OAGPnD,gBAAAA,SAAkBoC;QAKhBlC,KAAKkC,IAAiBA;;QAGlBlC,KAAKwC,KACPN,EAAelC,KAAKsC;OAIxBxC,gBAAAA;QAUME,KAAKyC,QACPzC,KAAKyC,KAAKe,wBAAwBxD,KAAmB6C,IAEvD7C,KAAKoC,IAAgB,MACrBpC,KAAKkC,IAAiB;;;;;;IAOhBpC,gBAAAA;QACN,IAAM2D,IAAazD,KAAKyC,QAAQzC,KAAKyC,KAAKiB;QAK1C,OAJAL,GACiB,SAAfI,KAA6C,mBAAfA,IAGzB,IAAItD,EAAKsD;;;IAwBlB3D,WAAoB6D,GAAoBC;iBAApBD,YAAoBC,GAHxC5D,YAAO,cACPA,YAAOG,EAAK0D;;WAIZC;aAAAA;YACE,IAAMC,IAAwC;gBAC5CC,mBAAmBhE,KAAK4D;eAEpBK,IAAajE,KAAK2D,EAAKlB,KAAKyB,EAAgC;YAIlE,OAHID,MACFF,EAAuBhC,gBAAIkC,IAEtBF;;;;;;IAUTjE,WAAoB6D,GAAoBC;iBAApBD,YAAoBC;;WAExC9D,uBAAAA;QACE,OAAOkC,QAAQC,QAAQ,IAAIkC,EAAgBnE,KAAK2D,GAAM3D,KAAK4D;OAG7D9D,gBAAAA,SAAkBoC;;QAEhBA,EAAe/B,EAAK0D;OAGtB/D,gBAAAA,eAEAA,gBAAAA;;ICzSAA,WAAqBsE,GAA0BC;QAC7C,IADmBrE,eAAAoE,GAA0BpE,mBAAAqE,GACzCA,IAAc,GAChB,MAAM,IAAIjB,EACRhD,EAAKI,kBACL,yCAAyC6D;QAG7C,IAAIA,KAAe,KACjB,MAAM,IAAIjB,EACRhD,EAAKI,kBACL,yCAAyC6D;QAG7C,IAAID,KA9BY,aA+Bd,MAAM,IAAIhB,EACRhD,EAAKI,kBACL,qCAAqC4D;;gBAIzC,IAAIA,KAAW,cACb,MAAM,IAAIhB,EACRhD,EAAKI,kBACL,qCAAqC4D;;mBArC3CtE;QACE,OAAOwE,EAAUC,WAAWC,KAAKC;oBAGnC3E,SAAgB4E;QACd,OAAOJ,EAAUC,WAAWG,EAAKC;sBAGnC7E,SAAkB8E;QAChB,IAAMR,IAAUS,KAAKC,MAAMF,IAAe;QAE1C,OAAO,IAAIN,EAAUF,GAD2B,OAAjCQ,IAAyB,MAAVR;OAgChCtE,qBAAAA;QACE,OAAO,IAAI0E,KAAKxE,KAAK+E;OAGvBjF,uBAAAA;QACE,OAAsB,MAAfE,KAAKoE,UAAiBpE,KAAKqE,cAAc;OAGlDvE,gBAAAA,SAAWkF;QACT,OAAIhF,KAAKoE,YAAYY,EAAMZ,UAClBa,GAAoBjF,KAAKqE,aAAaW,EAAMX,eAE9CY,GAAoBjF,KAAKoE,SAASY,EAAMZ;OAGjDtE,sBAAAA,SAAQkF;QACN,OACEA,EAAMZ,YAAYpE,KAAKoE,WAAWY,EAAMX,gBAAgBrE,KAAKqE;OAIjEvE,uBAAAA;QACE,OACE,uBACAE,KAAKoE,UACL,mBACApE,KAAKqE,cACL;OAIJvE,sBAAAA;;;;;;;QAOE,IAAMoF,IAAkBlF,KAAKoE,WAnFb;;gBAuFhB,OAFyBe,OAAOD,GAAiBE,SAAS,IAAI,OAEpC,MADGD,OAAOnF,KAAKqE,aAAae,SAAS,GAAG;;;IC3EpEtF,WAA4BuF;QAAArF,iBAAAqF;;iBAR5BvF,SAAqB8B;QACnB,OAAO,IAAI0D,EAAgB1D;eAG7B9B;QACE,OAAO,IAAIwF,EAAgB,IAAIhB,EAAU,GAAG;OAK9CxE,gBAAAA,SAAUkF;QACR,OAAOhF,KAAKqF,UAAUE,EAAWP,EAAMK;OAGzCvF,sBAAAA,SAAQkF;QACN,OAAOhF,KAAKqF,UAAUG,QAAQR,EAAMK;;iFAItCvF,gBAAAA;;QAEE,OAAgC,MAAzBE,KAAKqF,UAAUjB,UAAgBpE,KAAKqF,UAAUhB,cAAc;OAGrEvE,uBAAAA;QACE,OAAO,qBAAqBE,KAAKqF,UAAU5D,aAAa;OAG1D3B,gBAAAA;QACE,OAAOE,KAAKqF;;;ICvBdvF,WAAY2F,GAAoBC,GAAiBC;mBAC3CD,IACFA,IAAS,IACAA,IAASD,EAASE,UAC3BC,iBAGED,IACFA,IAASF,EAASE,SAASD,IAClBC,IAASF,EAASE,SAASD,KACpCE;QAEF5F,KAAKyF,WAAWA,GAChBzF,KAAK0F,SAASA,GACd1F,KAAK6F,IAAMF;;WAqBbA;aAAAA;YACE,O
AAO3F,KAAK6F;;;;QAGd/F,sBAAAA,SAAQkF;QACN,OAA4C,MAArCc,EAASC,EAAW/F,MAAMgF;OAGnClF,oBAAAA,SAAMkG;QACJ,IAAMP,IAAWzF,KAAKyF,SAASQ,MAAMjG,KAAK0F,QAAQ1F,KAAKkG;QAQvD,OAPIF,aAAsBF,IACxBE,EAAWG,SAAQC,SAAAA;YACjBX,EAASY,KAAKD;cAGhBX,EAASY,KAAKL,IAEThG,KAAKsG,EAAUb;;+DAIhB3F,oBAAAA;QACN,OAAOE,KAAK0F,SAAS1F,KAAK2F;OAG5B7F,gBAAAA,SAASyG;QAMP,OALAA,eAAOA,IAAqB,IAAIA,GAKzBvG,KAAKsG,EACVtG,KAAKyF,UACLzF,KAAK0F,SAASa,GACdvG,KAAK2F,SAASY;OAIlBzG,gBAAAA;QAEE,OAAOE,KAAKsG,EAAUtG,KAAKyF,UAAUzF,KAAK0F,QAAQ1F,KAAK2F,SAAS;OAGlE7F,gBAAAA;QAEE,OAAOE,KAAKyF,SAASzF,KAAK0F;OAG5B5F,gBAAAA;QACE,OAAOE,KAAK8C,IAAI9C,KAAK2F,SAAS;OAGhC7F,kBAAAA,SAAI0G;QAEF,OAAOxG,KAAKyF,SAASzF,KAAK0F,SAASc;OAGrC1G,gBAAAA;QACE,OAAuB,MAAhBE,KAAK2F;OAGd7F,gBAAAA,SAAWkF;QACT,IAAIA,EAAMW,SAAS3F,KAAK2F,QACtB;QAGF,KAAK,IAAIc,IAAI,GAAGA,IAAIzG,KAAK2F,QAAQc,KAC/B,IAAIzG,KAAK8C,IAAI2D,OAAOzB,EAAMlC,IAAI2D,IAC5B;QAIJ;OAGF3G,gBAAAA,SAAoB4G;QAClB,IAAI1G,KAAK2F,SAAS,MAAMe,EAAef,QACrC;QAGF,KAAK,IAAIc,IAAI,GAAGA,IAAIzG,KAAK2F,QAAQc,KAC/B,IAAIzG,KAAK8C,IAAI2D,OAAOC,EAAe5D,IAAI2D,IACrC;QAIJ;OAGF3G,sBAAAA,SAAQ6G;QACN,KAAK,IAAIF,IAAIzG,KAAK0F,QAAQkB,IAAM5G,KAAKkG,SAASO,IAAIG,GAAKH,KACrDE,EAAG3G,KAAKyF,SAASgB;OAIrB3G,gBAAAA;QACE,OAAOE,KAAKyF,SAASQ,MAAMjG,KAAK0F,QAAQ1F,KAAKkG;aAG/CpG,SACE+G,GACAC;QAGA,KADA,IAAMjB,IAAMhB,KAAKkC,IAAIF,EAAGlB,QAAQmB,EAAGnB,SAC1Bc,IAAI,GAAGA,IAAIZ,GAAKY,KAAK;YAC5B,IAAMO,IAAOH,EAAG/D,IAAI2D,IACdQ,IAAQH,EAAGhE,IAAI2D;YACrB,IAAIO,IAAOC,GACT,QAAQ;YAEV,IAAID,IAAOC,GACT,OAAO;;QAGX,OAAIJ,EAAGlB,SAASmB,EAAGnB,UACT,IAENkB,EAAGlB,SAASmB,EAAGnB,SACV,IAEF;;;;;;WAQuBG,SACtBhG,gBAAAA,SACR2F,GACAC,GACAC;QAEA,OAAO,IAAIuB,EAAazB,GAAUC,GAAQC;OAG5C7F,gBAAAA;;;;QAKE,OAAOE,KAAKmH,IAAUC,KAAK;OAG7BtH,uBAAAA;QACE,OAAOE,KAAKqH;;;;;UAMdvH,SAAkBwH;;;;QAKhB,IAAIA,EAAKC,QAAQ,SAAS,GACxB,MAAM,IAAInE,EACRhD,EAAKI,kBACL,mBAAiB8G;;;gBAQrB,OAAO,IAAIJ,EAFMI,EAAKE,MAAM,KAAKC,QAAOrB,SAAAA;YAAWA,OAAAA,EAAQT,SAAS;;;EAtCtCG;;;;;;;GA2CzBoB,OAAa,IAAIA,EAAa;;AAGvC,IAAMQ,IAAmB;;;;WAGM5B,SACnBhG,gBAAAA,SACR2F,GACAC,GACAC;QAEA,OAAO,IAAIgC,EAAUlC,GAAUC,GAAQC;;;;;;UAOjC7F,SAAyBsG;QAC/B,OAAOsB,EAAiBE,KAAKxB;OAG/BtG,gBAAAA;QACE,OAAOE,KAAKmH,IACTU,KAAIC,SAAAA;mBACHA,IAAMA,EAAIC,QAAQ,MAAM,QAAQA,QAAQ,KAAK,QACxCJ,EAAUK,EAAkBF,OAC/BA,IAAM,MAAMA,IAAM;YAEbA;YAERV,KAAK;OAGVtH,uBAAAA;QACE,OAAOE,KAAKqH;;;;;IAMdvH,gBAAAA;QACE,OAAuB,MAAhBE,KAAK2F,UAnQiB,eAmQD3F,KAAK8C,IAAI;;;;;UAMvChD;QACE,OAAO,IAAI6H,EAAU,EA1QQ;;;;;;;;;;;;UAuR/B7H,SAAwBwH;QAmBtB,KAlBA,IAAM7B,IAAqB,IACvBwC,IAAU,IACVxB,IAAI,GAEFyB,IAAoB;YACxB,IAAuB,MAAnBD,EAAQtC,QACV,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,yBAAuB8G;YAI3B7B,EAASY,KAAK4B,IACdA,IAAU;WAGRE,QAEG1B,IAAIa,EAAK3B,UAAQ;YACtB,IAAMyC,IAAId,EAAKb;YACf,IAAU,SAAN2B,GAAY;gBACd,IAAI3B,IAAI,MAAMa,EAAK3B,QACjB,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,yCAAyC8G;gBAG7C,IAAMe,IAAOf,EAAKb,IAAI;gBACtB,IAAe,SAAT4B,KAA0B,QAATA,KAAyB,QAATA,GACrC,MAAM,IAAIjF,EACRhD,EAAKI,kBACL,uCAAuC8G;gBAG3CW,KAAWI,GACX5B,KAAK;mBACU,QAAN2B,KACTD,KAAeA,GACf1B,OACe,QAAN2B,KAAcD,KAIvBF,KAAWG,GACX3B,QAJAyB,KACAzB;;QAQJ,IAFAyB,KAEIC,GACF,MAAM,IAAI/E,EACRhD,EAAKI,kBACL,6BAA6B8G;QAIjC,OAAO,IAAIK,EAAUlC;;EAlHMK;;yEAqHtB6B,OAAa,IAAIA,EAAU;;;;;;;;;;;;;;;;;;;ICjVlC7H,WAAqBwH;QAAAtH,YAAAsH;;iBAQrBxH,SAAgB4B;QACd,OAAO,IAAI4G,EAAYpB,EAAaqB,EAAW7G,GAAM8G,EAAS;;0EAIhE1I,iBAAAA,SAAgB2I;QACd,OACEzI,KAAKsH,KAAK3B,UAAU,KACpB3F,KAAKsH,KAAKxE,IAAI9C,KAAKsH,KAAK3B,SAAS,OAAO8C;OAI5C3I,sBAAAA,SAAQkF;QACN,OACY,SAAVA,KAAqE,MAAnDkC,EAAanB,EAAW/F,KAAKsH,MAAMtC,EAAMsC;OAI/DxH,uBAAAA;QACE,OAAOE,KAAKsH,KAAK7F;aAKnB3B,SAAkB4I,GAAiBC;QACjC,OAAOzB,EAAanB,EAAW2C,EAAGpB,MAAMqB,EAAGrB;cAG7CxH,SAAqBwH;QACnB,OAAOA,EAAK3B,SAAS,KAAM;;;;;;;;WAS7B7F,SAAoB2F;QAClB,O
AAO,IAAI6C,EAAY,IAAIpB,EAAazB,EAASQ;;;;;;;;;;;;;;;;;;;aC9CrC2C,EAAcC;IAC5B,IAAIC,IAAQ;IACZ,KAAK,IAAMC,KAAOF,GACZG,OAAOC,UAAUC,eAAeC,KAAKN,GAAKE,MAC5CD;IAGJ,OAAOA;;;SAGO3C,EACd0C,GACAlC;IAEA,KAAK,IAAMoC,KAAOF,GACZG,OAAOC,UAAUC,eAAeC,KAAKN,GAAKE,MAC5CpC,EAAGoC,GAAKF,EAAIE;;;SAKFK,EAAWP;IAKzB,KAAK,IAAME,KAAOF,GAChB,IAAIG,OAAOC,UAAUC,eAAeC,KAAKN,GAAKE,IAC5C;IAGJ;;;;;;;;;;;;;;;;;;;;;;;;;;cDFe,IAAIT,EAAY,IAAIpB,EAAa;;;IErBhDpH,WAAqCuJ;kBAAAA;;gCAErCvJ,SAAwBwJ;QAEtB,OAAO,IAAIC,EADUC,GAAgBC,KAAcC,KAAKJ;0BAI1DxJ,SAAsB6J;QAEpB,OAAO,IAAIJ;;;;iBA2B4BI;YAEzC,KADA,IAAIN,IAAe,IACV5C,IAAI,GAAGA,IAAIkD,EAAMhE,UAAUc,GAClC4C,KAAgBlE,OAAOyE,aAAaD,EAAMlD;YAE5C,OAAO4C;UAjC2CM;OAIlD7J,uBAAAA;QACE,OAAO0J,GAAgBC,KAAcI,KAAK7J,KAAKqJ;OAGjDvJ,2BAAAA;QACE,gBA8BuCuJ;YAEzC,KADA,IAAMS,IAAS,IAAIC,WAAWV,EAAa1D,SAClCc,IAAI,GAAGA,IAAI4C,EAAa1D,QAAQc,KACvCqD,EAAOrD,KAAK4C,EAAaW,WAAWvD;YAEtC,OAAOqD;UAnC6B9J,KAAKqJ;OAGzCvJ,iBAAAA;QACE,OAAkC,IAA3BE,KAAKqJ,GAAa1D;OAG3B7F,gBAAAA,SAAUkF;QACR,OAAOC,GAAoBjF,KAAKqJ,IAAcrE,EAAMqE;OAGtDvJ,sBAAAA,SAAQkF;QACN,OAAOhF,KAAKqJ,OAAiBrE,EAAMqE;;;;SCnCvBY,EAAkBrI;IAChC,OAAOA,QAAAA;;;yDAIOsI,EAAetI;;;IAG7B,QAAkB,MAAXA,KAAgB,IAAIA;;;;;;aCabuI,EAAkBvI;;IAEhC,OAPgC,sDAMlBA,QAAAA,aAAAA,EAAOwI,uCAAUC,WAAU,IAAYC,uCAAGC;;;;;;;;aAkD1CC,EAAkB5I;IAChC,IAAM6I,IAAiBC,EACrB9I,EAAMwI,SAAUC,OAA4BM,qBAAiBC;IAE/D,OAAO,IAAItG,EAAUmG,EAAerG,SAASqG,EAAeI;;;;;;;;;;;;;;;;;;;;OFxExB,IAAItB,EAAW;;IGK/CuB,IAAwB,IAAIC,OAChC;;0EAIcC,EAAUpJ;IACxB,OAAI,eAAeA,wBAER,kBAAkBA,2BAElB,kBAAkBA,KAAS,iBAAiBA,0BAE5C,oBAAoBA,6BAEpB,iBAAiBA,0BAEjB,gBAAgBA,wBAEhB,oBAAoBA,uBAEpB,mBAAmBA,4BAEnB,gBAAgBA,yBAEhB,cAAcA,IACnBuI,EAAkBvI,2DAnCSgE;;;sFA6CnBqF,EAAYjE,GAAiBC;IAC3C,IAAMiE,IAAWF,EAAUhE;IAE3B,IAAIkE,MADcF,EAAU/D,IAE1B;IAGF,QAAQiE;MACN;QACE;;MACF;QACE,OAAOlE,EAAKmE,iBAAiBlE,EAAMkE;;MACrC;QACE,OAAOX,EAAkBxD,GAAMxB,QAAQgF,EAAkBvD;;MAC3D;QACE,OAwBN,SAAyBD,GAAiBC;YACxC,IACiC,mBAAxBD,EAAK4D,kBACoB,mBAAzB3D,EAAM2D,kBACb5D,EAAK4D,eAAejF,WAAWsB,EAAM2D,eAAejF;;YAGpD,OAAOqB,EAAK4D,mBAAmB3D,EAAM2D;YAGvC,IAAMQ,IAAgBV,EAAmB1D,EAAoB4D,iBACvDS,IAAiBX,EAAmBzD,EAAqB2D;YAC/D,OACEQ,EAAchH,YAAYiH,EAAejH,WACzCgH,EAAcP,UAAUQ,EAAeR;SAd3C,CAxB6B7D,GAAMC;;MAC/B;QACE,OAAOD,EAAKuD,gBAAgBtD,EAAMsD;;MACpC;QACE,OA+CN,SAAoBvD,GAAiBC;YACnC,OAAOqE,EAAoBtE,EAAgBuE,YAAE/F,QAC3C8F,EAAoBrE,EAAiBsE;SAFzC,CA/CwBvE,GAAMC;;MAC1B;QACE,OAAOD,EAAKwE,mBAAmBvE,EAAMuE;;MACvC;QACE,OAkCN,SAAwBxE,GAAiBC;YACvC,OACEwE,EAAgBzE,EAAK0E,cAAeC,cAClCF,EAAgBxE,EAAMyE,cAAeC,aACvCF,EAAgBzE,EAAK0E,cAAeE,eAClCH,EAAgBxE,EAAMyE,cAAeE;SAL3C,CAlC4B5E,GAAMC;;MAC9B;QACE,gBA+CuBD,GAAiBC;YAC5C,IAAI,kBAAkBD,KAAQ,kBAAkBC,GAC9C,OACEwE,EAAgBzE,EAAK6E,kBAAkBJ,EAAgBxE,EAAM4E;YAE1D,IAAI,iBAAiB7E,KAAQ,iBAAiBC,GAAO;gBAC1D,IAAM6E,IAAKL,EAAgBzE,EAAiB+E,cACtCC,IAAKP,EAAgBxE,EAAkB8E;gBAE7C,OAAID,MAAOE,IACF9B,EAAe4B,OAAQ5B,EAAe8B,KAEtCC,MAAMH,MAAOG,MAAMD;;YAI9B;UA/DwBhF,GAAMC;;MAC5B;QACE,OAAOiF,GACLlF,EAAKmF,WAAYC,UAAU,IAC3BnF,EAAMkF,WAAYC,UAAU,IAC5BnB;;MAEJ;QACE,OA0DN,SAAsBjE,GAAiBC;YACrC,IAAMoF,IAAUrF,EAAKoD,SAAUC,UAAU,IACnCiC,IAAWrF,EAAMmD,SAAUC,UAAU;YAE3C,IAAIzB,EAAWyD,OAAazD,EAAW0D,IACrC;YAGF,KAAK,IAAMvD,KAAOsD,GAChB,IAAIA,EAAQnD,eAAeH,kBAEvBuD,EAASvD,OACRkC,EAAYoB,EAAQtD,IAAMuD,EAASvD,MAEpC;YAIN;SAlBF,CA1D0B/B,GAAMC;;MAC5B;QACE,OAhF6BrB;;;;SA8JnB2G,EACdC,GACAC;IAEA,mBACGD,EAASJ,UAAU,IAAIM,MAAKC,SAAAA;QAAK1B,OAAAA,EAAY0B,GAAGF;;;;SAIrCG,EAAa5F,GAAiBC;IAC5C,IAAMiE,IAAWF,EAAUhE,IACrB6F,IAAY7B,EAAU/D;IAE5B,IAAIiE,MAAa2B,GACf,OAAO5H,GAAoBiG,GAAU2B;IAGvC,QAAQ3B;MACN;QACE,OAAO;;MACT;QACE,OAAOjG,GAAoB+B,EAAkBmE,cAAElE,EAAmBkE;;MACpE;QACE,OAyBN,SAAwBnE,GAAiBC;YACvC,IAAM6F,IAAarB,EAAgBzE,EAAK6E,gBAAgB7E,EAAK+E,cACvDgB,IAActB
,EAAgBxE,EAAM4E,gBAAgB5E,EAAM8E;YAEhE,OAAIe,IAAaC,KACP,IACCD,IAAaC,IACf,IACED,MAAeC,IACjB;;YAGHd,MAAMa,KACDb,MAAMc,KAAe,KAAK,IAE1B;SAfb,CAzB4B/F,GAAMC;;MAC9B;QACE,OAAO+F,EAAkBhG,EAAoB4D,gBAAE3D,EAAqB2D;;MACtE;QACE,OAAOoC,EACLxC,EAAkBxD,IAClBwD,EAAkBvD;;MAEtB;QACE,OAAOhC,GAAoB+B,EAAiBuD,aAAEtD,EAAkBsD;;MAClE;QACE,OAkFN,SACEvD,GACAC;YAEA,IAAMgG,IAAY3B,EAAoBtE,IAChCkG,IAAa5B,EAAoBrE;YACvC,OAAOgG,EAAUE,EAAUD;SAN7B,CAlF0BlG,EAAgBuE,YAAEtE,EAAiBsE;;MACzD;QACE,OAsDN,SAA2B6B,GAAkBC;YAG3C,KAFA,IAAMC,IAAeF,EAAS5F,MAAM,MAC9B+F,IAAgBF,EAAU7F,MAAM,MAC7Bf,IAAI,GAAGA,IAAI6G,EAAa3H,UAAUc,IAAI8G,EAAc5H,QAAQc,KAAK;gBACxE,IAAM+G,IAAavI,GAAoBqI,EAAa7G,IAAI8G,EAAc9G;gBACtE,IAAmB,MAAf+G,GACF,OAAOA;;YAGX,OAAOvI,GAAoBqI,EAAa3H,QAAQ4H,EAAc5H;SAThE,CAtD+BqB,EAAoBwE,gBAAEvE,EAAqBuE;;MACtE;QACE,OAgEN,SAA0BxE,GAAkBC;YAC1C,IAAMuG,IAAavI,GACjBwG,EAAgBzE,EAAK2E,WACrBF,EAAgBxE,EAAM0E;YAExB,OAAmB,MAAf6B,IACKA,IAEFvI,GACLwG,EAAgBzE,EAAK4E,YACrBH,EAAgBxE,EAAM2E;SAV1B,CAhE8B5E,EAAmB0E,eAAEzE,EAAoByE;;MACnE;QACE,OAqFN,SAAuB1E,GAAsBC;YAI3C,KAHA,IAAMwG,IAAYzG,EAAKoF,UAAU,IAC3BsB,IAAazG,EAAMmF,UAAU,IAE1B3F,IAAI,GAAGA,IAAIgH,EAAU9H,UAAUc,IAAIiH,EAAW/H,UAAUc,GAAG;gBAClE,IAAMkH,IAAUf,EAAaa,EAAUhH,IAAIiH,EAAWjH;gBACtD,IAAIkH,GACF,OAAOA;;YAGX,OAAO1I,GAAoBwI,EAAU9H,QAAQ+H,EAAW/H;SAV1D,CArF2BqB,EAAgBmF,YAAElF,EAAiBkF;;MAC1D;QACE,OAgGN,SAAqBnF,GAAoBC;YACvC,IAAMoF,IAAUrF,EAAKqD,UAAU,IACzBuD,IAAW5E,OAAO6E,KAAKxB,IACvBC,IAAWrF,EAAMoD,UAAU,IAC3ByD,IAAY9E,OAAO6E,KAAKvB;;;;;wBAM9BsB,EAASG,QACTD,EAAUC;YAEV,KAAK,IAAItH,IAAI,GAAGA,IAAImH,EAASjI,UAAUc,IAAIqH,EAAUnI,UAAUc,GAAG;gBAChE,IAAMuH,IAAa/I,GAAoB2I,EAASnH,IAAIqH,EAAUrH;gBAC9D,IAAmB,MAAfuH,GACF,OAAOA;gBAET,IAAML,IAAUf,EAAaP,EAAQuB,EAASnH,KAAK6F,EAASwB,EAAUrH;gBACtE,IAAgB,MAAZkH,GACF,OAAOA;;YAIX,OAAO1I,GAAoB2I,EAASjI,QAAQmI,EAAUnI;SAxBxD,CAhGyBqB,EAAcoD,UAAEnD,EAAemD;;MACpD;QACE,MA1M6BxE;;;;AAkOnC,SAASoH,EAAkBhG,GAAqBC;IAC9C,IACkB,mBAATD,KACU,mBAAVC,KACPD,EAAKrB,WAAWsB,EAAMtB,QAEtB,OAAOV,GAAoB+B,GAAMC;IAGnC,IAAMmE,IAAgBV,EAAmB1D,IACnCqE,IAAiBX,EAAmBzD,IAEpCuG,IAAavI,GACjBmG,EAAchH,SACdiH,EAAejH;IAEjB,OAAmB,MAAfoJ,IACKA,IAEFvI,GAAoBmG,EAAcP,OAAOQ,EAAeR;;;SAkFjDoD,EAAYrM;IAC1B,OAGF,SAASsM,EAActM;QACrB,OAAI,eAAeA,IACV,SACE,kBAAkBA,IACpB,KAAKA,EAAMuJ,eACT,kBAAkBvJ,IACpB,KAAKA,EAAMiK,eACT,iBAAiBjK,IACnB,KAAKA,EAAMmK,cACT,oBAAoBnK,IAuBjC,SAA2ByD;YACzB,IAAM8I,IAAsBzD,EAAmBrF;YAC/C,OAAO,UAAQ8I,EAAoB/J,gBAAW+J,EAAoBtD;SAFpE,CAtB6BjJ,EAAqBgJ,kBACrC,iBAAiBhJ,IACnBA,EAAM2I,cACJ,gBAAgB3I,IAgBpB0J,EAfqB1J,EAAiB2J,YAeN6C,aAd5B,oBAAoBxM,KA0BN4J,IAzBE5J,EAAqB4J;QA0BzClD,EAAY+F,EAAS7C,GAAgB/J,cAzBjC,mBAAmBG,IAqBvB,UADiB0M,IAnBE1M,EAAoB8J,eAoBvBC,iBAAY2C,EAAS1C,kBAnBjC,gBAAgBhK,IA4C7B,SAAuBuK;YAGrB,KAFA,IAAIoC,IAAS,KACTC,eACgBrC,IAAAA,EAAWC,UAAU,IAArBD,cAAAA;gBAAf,IAAMvK;gBACJ4M,IAGHA,SAFAD,KAAU,KAIZA,KAAUL,EAActM;;YAE1B,OAAO2M,IAAS;SAXlB,CA3CyB3M,EAAiBuK,cAC7B,cAAcvK,IAwB3B,SAAqBwI;YAOnB;;;YAJA,IAEImE,IAAS,KACTC,eACcC,IAJCzF,OAAO6E,KAAKzD,EAASC,UAAU,IAAI0D,QAIpCU,cAAAA;gBAAb,IAAM1F;gBACJyF,IAGHA,SAFAD,KAAU,KAIZA,KAAaxF,UAAOmF,EAAc9D,EAASC,OAAQtB;;YAErD,OAAOwF,IAAS;SAflB,CAvBuB3M,EAAewI,YAjWHxE;QAgXnC,IAA0B0I,GAIC9C;KA5ClB0C,CAActM;;;SA6IP8I,EACdhG;;;;IAOA,IAzcoDrB,KAocvCqB,IAKO,mBAATA,GAAmB;;;;QAK5B,IAAImG,IAAQ,GACN6D,IAAW5D,EAAsB6D,KAAKjK;QAE5C,IAjdkDrB,KAgdrCqL,IACTA,EAAS,IAAI;;YAEf,IAAIE,IAAUF,EAAS;YACvBE,KAAWA,IAAU,aAAaC,OAAO,GAAG,IAC5ChE,IAAQiE,OAAOF;;;gBAIjB,IAAMG,IAAa,IAAIvK,KAAKE;QAG5B,OAAO;YAAEN,SAFOS,KAAKC,MAAMiK,EAAWpK,YAAY;YAEhCkG,OAAAA;;;IAOlB,OAAO;QAAEzG,SAFOqH,EAAgB/G,EAAKN;QAEnByG,OADJY,EAAgB/G,EAAKmG;;;;;;;aASvBY,EAAgB7J;;IAE9B,OAAqB,mBAAVA,IACFA,IACmB,mBAAVA,IACTkN,OAAOlN,KAEP;;;+EAKK0J,EAAoB0D
;IAClC,OAAoB,mBAATA,IACFzF,EAAW0F,iBAAiBD,KAE5BzF,EAAW2F,eAAeF;;;6EAKrBG,EAASC,GAAwBrG;IAC/C,OAAO;QACLyC,gBAAgB,cAAY4D,EAAWC,4BACrCD,EAAWE,2BACCvG,EAAIzB,KAAKD;;;;6DAKXkI,GACd3N;IAEA,SAASA,KAAS,kBAAkBA;;;;0DAgBtB4N,GACd5N;IAEA,SAASA,KAAS,gBAAgBA;;;wDAWpB6N,GACd7N;IAEA,SAASA,KAAS,eAAeA;;;gDAInB8N,GACd9N;IAEA,SAASA,KAAS,iBAAiBA,KAASqK,MAAM6C,OAAOlN,EAAMmK;;;uDAIjD4D,GACd/N;IAEA,SAASA,KAAS,cAAcA;;;;;;;;;;;;;;;;;;;;IC7hBhC9B;WAGAA,iBAAAA,SACE8P,GACAnF;QAEA,gBF1BFA,GACAmF;YAEA,IAAMxF,IAAyB;gBAC7BC,QAAQ;oBACNC,UAAY;wBACVC,aApB0B;;oBAsB5BI,sBAAwB;wBACtBC,gBAAgB;4BACdxG,SAASqG,EAAerG;4BACxByG,OAAOJ,EAAepG;;;;;YAU9B,OAJIuL,MACFxF,EAASC,OAA0BwF,qBAAID,IAGlC;gBAAExF,UAAAA;;aEKiCwF;OAG1C9P,iBAAAA,SACE8P,GACAE;QAEA,OAAOA;OAGThQ,iBAAAA,SAAiB8P;QACf,OAAO;;WAGT9P,sBAAAA,SAAQkF;QACN,OAAOA,aAAiB+K;;;;AArBnBA,cAAW,IAAIA;;;;IA2BtBjQ,WAAqBkQ;QAAAhQ,gBAAAgQ;;WAErBlQ,iBAAAA,SACE8P,GACAnF;QAEA,OAAOzK,KAAKiQ,MAAML;OAGpB9P,iBAAAA,SACE8P,GACAE;;;;QAKA,OAAO9P,KAAKiQ,MAAML;OAGZ9P,oBAAAA,SAAM8P;QAEZ,KADA,IAAMxD,IAAS8D,GAAwBN,iBAC5BO;YACJ/D,EAAOgE,MAAKC,SAAAA;gBAAWpF,OAAAA,EAAYoF,GAASF;mBAC/C/D,EAAO/F,KAAK8J;kBAFMnQ,IAAAA,KAAKgQ,UAALhQ,cAAAA;;;QAKtB,OAAO;YAAEmM,YAAY;gBAAEC,QAAAA;;;OAGzBtM,iBAAAA,SAAiB8P;QACf,OAAO;;WAGT9P,sBAAAA,SAAQkF;QACN,OACEA,aAAiBsL,KACjBpE,GAAYlM,KAAKgQ,UAAUhL,EAAMgL,UAAU/E;;;IAO/CnL,WAAqBkQ;QAAAhQ,gBAAAgQ;;WAErBlQ,iBAAAA,SACE8P,GACAnF;QAEA,OAAOzK,KAAKiQ,MAAML;OAGpB9P,iBAAAA,SACE8P,GACAE;;;;QAKA,OAAO9P,KAAKiQ,MAAML;OAGZ9P,oBAAAA,SAAM8P;QAEZ,KADA,IAAIxD,IAAS8D,GAAwBN,iBAC1BW;YACTnE,IAASA,EAAO3E,QAAO4I,SAAAA;wBAAYpF,EAAYoF,GAASE;;kBADnCvQ,IAAAA,KAAKgQ,UAALhQ,cAAAA;;;QAGvB,OAAO;YAAEmM,YAAY;gBAAEC,QAAAA;;;OAGzBtM,iBAAAA,SAAiB8P;QACf,OAAO;;WAGT9P,sBAAAA,SAAQkF;QACN,OACEA,aAAiBwL,KACjBtE,GAAYlM,KAAKgQ,UAAUhL,EAAMgL,UAAU/E;;;IAY/CnL,WACmB2Q,GACRC;QADQ1Q,kBAAAyQ,aACRC;;WAQX5Q,iBAAAA,SACE8P,GACAnF;;;;QAKA,IAAMkG,IAAY3Q,KAAK4Q,GAAiBhB,IAClCiB,IAAM7Q,KAAK8Q,SAASH,KAAa3Q,KAAK8Q,SAAS9Q,KAAK0Q;QAC1D,OAAInB,GAAUoB,MAAcpB,GAAUvP,KAAK0Q,MAClC1Q,KAAKyQ,WAAWM,GAAUF,KAE1B7Q,KAAKyQ,WAAWO,GAASH;OAIpC/Q,iBAAAA,SACE8P,GACAE;QAMA,OAAOA;;;;;;IAOThQ,iBAAAA,SAAiB8P;QACf,OD2VKL,GADgB3N,IC1VLgO,eDoVlBhO;YAEA,SAASA,KAAS,iBAAiBA;UAKCA,KC3VDgO,IAAiB;YAAE/D,cAAc;;YD0V7CjK;OCvVvB9B,sBAAAA,SAAQkF;QACN,OACEA,aAAiBiM,KACjBhG,EAAYjL,KAAK0Q,IAAS1L,EAAM0L;OAI5B5Q,uBAAAA,SAAS8B;QACf,OAAO6J,EAAgB7J,EAAMiK,gBAAgBjK,EAAMmK;;;;wDAIvD,UAASmE,GAAwBtO;IAC/B,OAAO4N,GAAQ5N,MAAUA,EAAMuK,WAAWC,SACtCxK,EAAMuK,WAAWC,OAAOnG,UACxB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IC7MJnG,WAAqBuK;QAAArK,cAAAqK;;;QAGnBA,EAAO0D,KAAKpG,EAAU5B;;;;;;;;WAcxBjG,iBAAAA,SAAOoR;QACL,KAA4BlR,WAAAA,IAAAA,KAAKqK,QAALrK,cAAAA;YAC1B,SAAkBmR,EAAWD,IAC3B;;QAGJ;OAGFpR,sBAAAA,SAAQkF;QACN,OAAOkH,GAAYlM,KAAKqK,QAAQrF,EAAMqF,SAAQ,SAAC+G,GAAGC;YAAMD,OAAAA,EAAE5L,QAAQ6L;;;;IAMpEvR,WACWwR,GACAC;QADAvR,aAAAsR,GACAtR,iBAAAuR;;WAGXzR,sBAAAA,SAAQkF;QACN,OACEhF,KAAKsR,MAAM9L,QAAQR,EAAMsM,UAAUtR,KAAKuR,UAAU/L,QAAQR,EAAMuM;;UAOpEzR;;;;;;;;;;;AAWW0R;;;;;;;;AAQAC;IARAzR,eAAAwR,GAQAxR,wBAAAyR;;IAkBX3R,WACW4R,GACAC;QADA3R,kBAAA0R,GACA1R,cAAA2R;;;kBASX7R;QACE,OAAO,IAAI8R;;sEAIb9R,SAAc6R;QACZ,OAAO,IAAIC,UAAwBD;;8FAIrC7R,SAAkB0R;QAChB,OAAO,IAAII,EAAaJ;OAI1BK;gEAAAA;YACE,kBAAO7R,KAAK0R,yBAA4B1R,KAAK2R;;;;;;;;;IAO/C7R,iBAAAA,SAAWgS;QACT,kBAAI9R,KAAK0R,aAELI,aAAoBC,MACpBD,EAASN,QAAQhM,QAAQxF,KAAK0R,yBAEvB1R,KAAK2R,UACP3R,KAAK2R,WAAWG,aAAoBC;OAO/CjS,sBAAAA,SAAQkF;QACN,OACEhF,KAAK2R,WAAW3M,EAAM2M,WACrB3R,KAAK0R,eACA1M,EAAM0M,cAAc1R,KAAK0R,WAAWlM,QAAQR,EAAM0M,eACnD1M,EAAM0M;;;;WAsHL5R,iBAAAA,SAAiBgS;;;;;;;WAejBhS,SACRgS;QAEA,OAAIA,aAAoBC,KACfD,EAASN,UAETlM,EAAgByB;;;IAU3BjH,WACWiJ,GACAnH,GACAoQ;QAHXlS;gBAKE0B,IAAAA,4BAJSu
H,GACA/I,UAAA4B,UACAoQ,GAKFhS;;;WATsBiS,SAW/BnS,iBAAAA,SACEgS,GACAI;QAEAlS,KAAKmS,GAAiBL;;;;QAWtB,IAAMN,IAAUU,EAAeV;QAC/B,OAAO,IAAIO,GAAS/R,KAAK+I,KAAKyI,GAASxR,KAAK4B,OAAO;YACjDwQ;;OAIJtS,iBAAAA,SACEgS,GACAO,GACA5H;QAIA,IAFAzK,KAAKmS,GAAiBL,KAEjB9R,KAAKgS,GAAaM,GAAWR,IAChC,OAAOA;QAGT,IAAMN,IAAUS,GAASM,GAAuBT;QAChD,OAAO,IAAIC,GAAS/R,KAAK+I,KAAKyI,GAASxR,KAAK4B,OAAO;YACjD4Q;;OAIJ1S,iBAAAA,SAAiBgS;QACf,OAAO;OAGThS,sBAAAA,SAAQkF;QACN,OACEA,aAAiByN,KACjBzS,KAAK+I,IAAIvD,QAAQR,EAAM+D,QACvB/I,KAAK4B,MAAM4D,QAAQR,EAAMpD,UACzB5B,KAAKgS,GAAaxM,QAAQR,EAAMgN;;EA1DLC;IA6E/BnS,WACWiJ,GACA2J,GACAC,GACAX;QAJXlS;gBAME0B,IAAAA,4BALSuH,GACA/I,SAAA0S,UACAC,UACAX,GAKFhS;;;WAVwBiS,SAYjCnS,iBAAAA,SACEgS,GACAI;QASA,IAPAlS,KAAKmS,GAAiBL,KAOjB9R,KAAKgS,GAAaM,GAAWR;;;;;QAKhC,OAAO,IAAIc,GAAgB5S,KAAK+I,KAAKmJ,EAAeV;QAGtD,IAAMqB,IAAU7S,KAAK8S,GAAchB;QACnC,OAAO,IAAIC,GAAS/R,KAAK+I,KAAKmJ,EAAeV,SAASqB,GAAS;YAC7DT;;OAIJtS,iBAAAA,SACEgS,GACAO,GACA5H;QAIA,IAFAzK,KAAKmS,GAAiBL,KAEjB9R,KAAKgS,GAAaM,GAAWR,IAChC,OAAOA;QAGT,IAAMN,IAAUS,GAASM,GAAuBT,IAC1Ce,IAAU7S,KAAK8S,GAAchB;QACnC,OAAO,IAAIC,GAAS/R,KAAK+I,KAAKyI,GAASqB,GAAS;YAC9CL;;OAIJ1S,iBAAAA,SAAiBgS;QACf,OAAO;OAGThS,sBAAAA,SAAQkF;QACN,OACEA,aAAiB+N,KACjB/S,KAAK+I,IAAIvD,QAAQR,EAAM+D,QACvB/I,KAAK2S,GAAUnN,QAAQR,EAAM2N,OAC7B3S,KAAKgS,GAAaxM,QAAQR,EAAMgN;;;;;;;IAS5BlS,iBAAAA,SAAcgS;QACpB,IAAIY;QAMJ,OAJEA,IADEZ,aAAoBC,KACfD,EAASY,SAETM,GAAYC,SAEdjT,KAAKkT,GAAYR;OAGlB5S,iBAAAA,SAAY4S;QAAZ5S,cACAqT,IAAU,IAAIC,GAAmBV;QAWvC,OAVA1S,KAAK2S,GAAUtI,OAAOlE,SAAQ+K,SAAAA;YAC5B,KAAKA,EAAU9H,KAAW;gBACxB,IAAMiK,IAAWrT,EAAK0S,KAAKpB,MAAMJ;gBAChB,SAAbmC,IACFF,EAAQG,IAAIpC,GAAWmC,KAEvBF,EAAQI,OAAOrC;;aAIdiC,EAAQK;;EA/FgBvB;IAoHjCnS,WACWiJ,GACA0K;QAFX3T;gBAIE0B,IAAAA,4BAHSuH,GACA/I,oBAAAyT,GATFzT;;;;QAKTA,OAAwB4R,GAAaD;;WANAM,SAerCnS,iBAAAA,SACEgS,GACAI;QASA,IAPAlS,KAAKmS,GAAiBL,IAEtBzO,GACqC,QAAnC6O,EAAeT,oBAIZzR,KAAKgS,GAAaM,GAAWR;;;;;QAKhC,OAAO,IAAIc,GAAgB5S,KAAK+I,KAAKmJ,EAAeV;QAGtD,IAAMkC,IAAM1T,KAAK2T,GAAgB7B,IAC3BL,IAAmBzR,KAAK4T,GAC5B9B,GACAI,EAAgCT,mBAG5BD,IAAUU,EAAeV,SACzBqB,IAAU7S,KAAK6T,GAAgBH,EAAIhB,QAAQjB;QACjD,OAAO,IAAIM,GAAS/R,KAAK+I,KAAKyI,GAASqB,GAAS;YAC9CT;;OAIJtS,iBAAAA,SACEgS,GACAO,GACA5H;QAIA,IAFAzK,KAAKmS,GAAiBL,KAEjB9R,KAAKgS,GAAaM,GAAWR,IAChC,OAAOA;QAGT,IAAM4B,IAAM1T,KAAK2T,GAAgB7B,IAC3BL,IAAmBzR,KAAK8T,GAC5BrJ,GACAqH,GACAO,IAEIQ,IAAU7S,KAAK6T,GAAgBH,EAAIhB,QAAQjB;QACjD,OAAO,IAAIM,GAAS/R,KAAK+I,KAAK2K,EAAIlC,SAASqB,GAAS;YAClDL;;OAIJ1S,iBAAAA,SAAiBgS;QAEf,KADA,IAAIiC,IAAwC,aACf/T,IAAAA,KAAKyT,iBAALzT,cAAAA,KAAsB;YAA9C,IAAMgU,UACHC,IACJnC,aAAoBC,KAChBD,EAASR,MAAM0C,EAAe1C,iBAE9B4C,IAAeF,EAAezC,UAAUX,GAC5CqD,KAAiB;YAGC,QAAhBC,MAEAH,IADgB,QAAdA,SACeX,IAAqBE,IACpCU,EAAe1C,OACf4C,KAGWH,EAAWT,IAAIU,EAAe1C,OAAO4C;;QAIxD,OAAOH,IAAaA,EAAWP,OAAU;OAG3C1T,sBAAAA,SAAQkF;QACN,OACEA,aAAiBmP,KACjBnU,KAAK+I,IAAIvD,QAAQR,EAAM+D,QACvBmD,GAAYlM,KAAKyT,iBAAiBzO,EAAMyO,kBAAiB,SAACrC,GAAGC;YAC3DD,OAAAA,EAAE5L,QAAQ6L;eAEZrR,KAAKgS,GAAaxM,QAAQR,EAAMgN;;;;;;;;IAU5BlS,iBAAAA,SAAgBgS;QAStB,OAAOA;;;;;;;;;;;IAYDhS,iBAAAA,SACNuS,GACAuB;QAEA,IAAMnC,IAAgC;QA/lBjCpO,GAimBHrD,KAAKyT,gBAAgB9N,WAAWiO,EAAuBjO;QAKzD,KAAK,IAAIc,IAAI,GAAGA,IAAImN,EAAuBjO,QAAQc,KAAK;YACtD,IAAMuN,IAAiBhU,KAAKyT,gBAAgBhN,IACtC8K,IAAYyC,EAAezC,WAC7B3B,IAAkC;YAClCyC,aAAmBN,OACrBnC,IAAgByC,EAAQf,MAAM0C,EAAe1C,SAE/CG,EAAiBpL,KACfkL,EAAU6C,GACRxE,GACAgE,EAAuBnN;;QAI7B,OAAOgL;;;;;;;;;;;;;;IAeD3R,iBAAAA,SACN2K,GACAqH,GACAO;QAGA,KADA,IAAMZ,IAAgC,WACTzR,IAAAA,KAAKyT,iBAALzT,cAAAA,KAAsB;YAA9C,IAAMgU,UACHzC,IAAYyC,EAAezC,WAE7B3B,IAAkC;YAClCkC,aAAoBC,OACtBnC,IAAgBkC,EAASR,MAAM0C,EAAe1C,SAG1B,SAAlB1B,KAA0ByC,aAAmBN;;;;;YAK/CnC,IAAgByC,EAAQf,MAA
M0C,EAAe1C,SAG/CG,EAAiBpL,KACfkL,EAAU8C,GAAiBzE,GAAenF;;QAG9C,OAAOgH;OAGD3R,iBAAAA,SACN4S,GACAjB;QAQA,KADA,IAAM0B,IAAU,IAAIC,GAAmBV,IAC9BjM,IAAI,GAAGA,IAAIzG,KAAKyT,gBAAgB9N,QAAQc,KAAK;YACpD,IACMyK,IADiBlR,KAAKyT,gBAAgBhN,GACX6K;YACjC6B,EAAQG,IAAIpC,GAAWO,EAAiBhL;;QAE1C,OAAO0M,EAAQK;;EAzNoBvB;IA+NrCnS,WAAqBiJ,GAA2BiJ;QAAhDlS;gBACE0B,IAAAA,4BADmBuH,UAA2BiJ,GAIvChS;;WALyBiS,SAOlCnS,iBAAAA,SACEgS,GACAI;;;;QAaA,OAXAlS,KAAKmS,GAAiBL,IAWf,IAAIwC,GAAWtU,KAAK+I,KAAKmJ,EAAeV,SAAS;YACtDY;;OAIJtS,iBAAAA,SACEgS,GACAO,GACA5H;QAIA,OAFAzK,KAAKmS,GAAiBL,IAEjB9R,KAAKgS,GAAaM,GAAWR,KAU3B,IAAIwC,GAAWtU,KAAK+I,KAAKzD,EAAgByB,SATvC+K;OAYXhS,iBAAAA,SAAiBgS;QACf,OAAO;OAGThS,sBAAAA,SAAQkF;QACN,OACEA,aAAiBuP,KACjBvU,KAAK+I,IAAIvD,QAAQR,EAAM+D,QACvB/I,KAAKgS,GAAaxM,QAAQR,EAAMgN;;EAvDFC;IAoElCnS,WAAqBiJ,GAA2BiJ;QAAhDlS;gBACE0B,IAAAA,4BADmBuH,UAA2BiJ,GAIvChS;;WALyBiS,SAOlCnS,iBAAAA,SACEgS,GACAI;QAEAtM;OAGF9F,iBAAAA,SACEgS,GACAO,GACA5H;QAEA7E;OAGF9F,iBAAAA,SAAiBgS;QACflM;OAGF9F,sBAAAA,SAAQkF;QACN,OACEA,aAAiBwP,KACjBxU,KAAK+I,IAAIvD,QAAQR,EAAM+D,QACvB/I,KAAKgS,GAAaxM,QAAQR,EAAMgN;;EA9BFC;ICjvBlCnS,WAA4B2U;QAAAzU,aAAAyU;;qBAO5B3U;QACE,OAAO,IAAIkT,EAAY;YAAE5I,UAAU;;;;;;;;;IASrCtK,oBAAAA,SAAMwH;QACJ,IAAIA,EAAK8B,KACP,OAAOpJ,KAAKyU;QAGZ,KADA,IAAI7S,IAAmB5B,KAAKyU,OACnBhO,IAAI,GAAGA,IAAIa,EAAK3B,SAAS,KAAKc,GAAG;YACxC,KAAK7E,EAAMwI,SAAUC,QACnB,OAAO;YAGT,KAAKsF,GADL/N,IAAQA,EAAMwI,SAAUC,OAAO/C,EAAKxE,IAAI2D,MAEtC,OAAO;;QAKX,QADA7E,KAASA,EAAMwI,SAAUC,UAAU,IAAI/C,EAAKoN,SAC5B;OAIpB5U,sBAAAA,SAAQkF;QACN,OAAOiG,EAAYjL,KAAKyU,OAAOzP,EAAMyP;;;;;;IAsBvC3U,WAA6BiU;yBAAAA,IAA0Bf,GAAYC,oBAAtCc;;QAL7B/T,UAAqB,IAAI2U;;;;;;;;kBAczB7U,kBAAAA,SAAIwH,GAAiB1F;QAMnB,OADA5B,KAAK4U,GAAWtN,GAAM1F,IACf5B;;;;;;;;;IAUTF,qBAAAA,SAAOwH;QAML,OADAtH,KAAK4U,GAAWtN,GAAM,OACftH;;;;;;IAODF,iBAAAA,SAAWwH,GAAiB1F;QAGlC,KAFA,IAAIiT,IAAe7U,KAAK8U,IAEfrO,IAAI,GAAGA,IAAIa,EAAK3B,SAAS,KAAKc,GAAG;YACxC,IAAMsO,IAAiBzN,EAAKxE,IAAI2D,IAC5BuO,IAAeH,EAAa/R,IAAIiS;YAEhCC,aAAwBL;;YAE1BE,IAAeG,IAEfA,8BACAhK,EAAUgK;;YAGVA,IAAe,IAAIL,IACjB3L,OAAOiM,QAAQD,EAAa5K,SAAUC,UAAU,MAElDwK,EAAavB,IAAIyB,GAAgBC,IACjCH,IAAeG;;YAGfA,IAAe,IAAIL,KACnBE,EAAavB,IAAIyB,GAAgBC,IACjCH,IAAeG;;QAInBH,EAAavB,IAAIhM,EAAKoN,KAAe9S;;8DAIvC9B,iBAAAA;QACE,IAAMoV,IAAelV,KAAKmV,GACxBxN,EAAUyN,GACVpV,KAAK8U;QAEP,OAAoB,QAAhBI,IACK,IAAIlC,GAAYkC,KAEhBlV,KAAK+T;;;;;;;;;;;;;;IAgBRjU,iBAAAA,SACNuV,GACAC;QAFMxV,cAIFyV,QAEEtB,IAAgBjU,KAAK+T,GAAWzC,MAAM+D,IACtCG,IAAe7F,GAAWsE;0BAGvBA,EAAc7J,SAASC,UAC5B;QAkBJ,OAhBAiL,EAAgBnP,SAAQ,SAACvE,GAAO6T;YAC9B,IAAI7T,aAAiB+S,KAAK;gBACxB,IAAMe,IAAS1V,EAAKmV,GAAaE,EAAYM,MAAMF,IAAc7T;gBACnD,QAAV8T,MACFF,EAAaC,KAAeC,GAC5BH;mBAEiB,SAAV3T,KACT4T,EAAaC,KAAe7T,GAC5B2T,UACSC,EAAatM,eAAeuM,cAC9BD,EAAaC,IACpBF;aAIGA,IAAW;YAAEnL,UAAU;gBAAEC,QAAQmL;;YAAmB;;;;;;;;SAO/CI,GAAiBhU;IAC/B,IAAMyI,IAAsB;IAsB5B,OArBAlE,EAAQvE,EAAOyI,UAAU,KAAI,SAACtB,GAAKnH;QACjC,IAAMyT,IAAc,IAAI1N,EAAU,EAACoB;QACnC,IAAI4G,GAAW/N,IAAQ;YACrB,IACMiU,IADaD,GAAiBhU,EAAewI,UACnBC;YAChC,IAA4B,MAAxBwL,EAAalQ;;YAEf0E,EAAOhE,KAAKgP;;;YAIZ,KAAyBQ,WAAAA,IAAAA,GAAAA,cAAAA;gBAApB,IAAMC;gBACTzL,EAAOhE,KAAKgP,EAAYM,MAAMG;;;;;QAMlCzL,EAAOhE,KAAKgP;SAGT,IAAIU,GAAU1L;;;;;;;;;;;;;;;;;;;;;;;ICzOlB2L,aCMHlW,SAAqBiJ,GAA2ByI;IAA3BxR,WAAA+I,GAA2B/I,eAAAwR;;IAqBhD1R,WACEiJ,GACAyI,GACiByE,GACjBC;QAJFpW;gBAME0B,IAAAA,aAAMuH,GAAKyI,iBAHMyE,GAIjBjW,EAAKmW,OAAsBD,EAAQC,IACnCnW,EAAKoS,0BAA0B8D,EAAQ9D;;;WAZbgE,SAe5BtW,oBAAAA,SAAMwH;QACJ,OAAOtH,KAAKiW,GAAY3E,MAAMhK;OAGhCxH,mBAAAA;QACE,OAAOE,KAAKiW;OAGdnW,iBAAAA;QACE,OAAOE,KAAKiW,GAAYxB;OAG1B3U,sBAAAA,SAAQkF;QACN,OACEA,aAAiB+M,KACjB/R,KAAK+I,IAAIvD,QAAQR,EAAM+D,QACvB/I,KAAKwR,QAAQhM,QAAQR,EAAMwM,YA
C3BxR,KAAKmW,OAAsBnR,EAAMmR,MACjCnW,KAAKoS,0BAA0BpN,EAAMoN,yBACrCpS,KAAKiW,GAAYzQ,QAAQR,EAAMiR;OAInCnW,uBAAAA;QACE,OACE,cAAYE,KAAK+I,aACf/I,KAAKwR,iBACFxR,KAAKiW,GAAYxU,wCACCzB,KAAKmW,sCACDnW,KAAKoS;OAIpCiE;aAAAA;YACE,OAAOrW,KAAKmW,MAAqBnW,KAAKoS;;;;;EAjDZgE;IA+E5BtW,WACEiJ,GACAyI,GACA0E;QAHFpW;gBAKE0B,IAAAA,aAAMuH,GAAKyI,YACNY,2BAA2B8D,MAAWA,EAAQ9D;;;WATvBgE,SAY9BtW,uBAAAA;QACE,OAAO,gBAAcE,KAAK+I,aAAQ/I,KAAKwR;OAGzC6E;aAAAA;YACE,OAAOrW,KAAKoS;;;;QAGdtS,sBAAAA,SAAQkF;QACN,OACEA,aAAiBsP,KACjBtP,EAAMoN,0BAA0BpS,KAAKoS,yBACrCpN,EAAMwM,QAAQhM,QAAQxF,KAAKwR,YAC3BxM,EAAM+D,IAAIvD,QAAQxF,KAAK+I;;EAzBGqN;;;;WAkCKA,SACnCtW,uBAAAA;QACE,OAAO,qBAAmBE,KAAK+I,aAAQ/I,KAAKwR;OAG9C6E;aAAAA;YACE;;;;QAGFvW,sBAAAA,SAAQkF;QACN,OACEA,aAAiB4N,KACjB5N,EAAMwM,QAAQhM,QAAQxF,KAAKwR,YAC3BxM,EAAM+D,IAAIvD,QAAQxF,KAAK+I;;EAbQqN;;;;;;;;;IC5HnCtW,WACWwH,GACAgP,GACAC,GACAC,GACAtQ,GACAuQ,GACAC;yBALAJ,4BACAC,0BACAC;yBACAtQ,4BACAuQ,4BACAC;QANA1W,YAAAsH,GACAtH,uBAAAsW,GACAtW,eAAAuW,GACAvW,eAAAwW,GACAxW,aAAAkG;QACAlG,eAAAyW,GACAzW,aAAA0W,GAjBX1W,UAA6C;;WAoB7CF,0BAAAA;QACE,IAAiC,SAA7BE,KAAK2W,IAA8B;YACrC,IAAI1I,IAAcjO,KAAKsH,KAAKD;YACC,SAAzBrH,KAAKsW,oBACPrI,KAAe,SAASjO,KAAKsW,kBAE/BrI,KAAe;YACfA,KAAejO,KAAKwW,QAAQ3O,KAAI+O,SAAAA;gBAAKA,OAAAA,EAAE3I;gBAAe7G,KAAK,MAC3D6G,KAAe,QACfA,KAAejO,KAAKuW,QAAQ1O,KAAI/D,SAAAA;gBAAKA,OAAAA,EAAEmK;gBAAe7G,KAAK,MAEtD6C,EAAkBjK,KAAKkG,WAC1B+H,KAAe,OACfA,KAAejO,KAAKkG,QAElBlG,KAAKyW,YACPxI,KAAe;YACfA,KAAejO,KAAKyW,QAAQxI,gBAE1BjO,KAAK0W,UACPzI,KAAe,QACfA,KAAejO,KAAK0W,MAAMzI;YAE5BjO,KAAK2W,KAAsB1I;;QAE7B,OAAOjO,KAAK2W;OAGd7W,uBAAAA;QACE,IAAIgI,IAAM9H,KAAKsH,KAAKD;QAmBpB,OAlB6B,SAAzBrH,KAAKsW,oBACPxO,KAAO,sBAAsB9H,KAAKsW;QAEhCtW,KAAKwW,QAAQ7Q,SAAS,MACxBmC,KAAO,iBAAe9H,KAAKwW,QAAQpP,KAAK;QAErC6C,EAAkBjK,KAAKkG,WAC1B4B,KAAO,cAAc9H,KAAKkG,QAExBlG,KAAKuW,QAAQ5Q,SAAS,MACxBmC,KAAO,iBAAe9H,KAAKuW,QAAQnP,KAAK;QAEtCpH,KAAKyW,YACP3O,KAAO,gBAAgB9H,KAAKyW,QAAQxI,gBAElCjO,KAAK0W,UACP5O,KAAO,cAAc9H,KAAK0W,MAAMzI;QAE3B,YAAUnG;OAGnBhI,sBAAAA,SAAQkF;QACN,IAAIhF,KAAKkG,UAAUlB,EAAMkB,OACvB;QAGF,IAAIlG,KAAKuW,QAAQ5Q,WAAWX,EAAMuR,QAAQ5Q,QACxC;QAGF,KAAK,IAAIc,IAAI,GAAGA,IAAIzG,KAAKuW,QAAQ5Q,QAAQc,KACvC,KAAKzG,KAAKuW,QAAQ9P,GAAGjB,QAAQR,EAAMuR,QAAQ9P,KACzC;QAIJ,IAAIzG,KAAKwW,QAAQ7Q,WAAWX,EAAMwR,QAAQ7Q,QACxC;QAGF,KAAK,IAAIc,IAAI,GAAGA,IAAIzG,KAAKwW,QAAQ7Q,QAAQc,KACvC,KAAKzG,KAAKwW,QAAQ/P,GAAGjB,QAAQR,EAAMwR,QAAQ/P,KACzC;QAIJ,OAAIzG,KAAKsW,oBAAoBtR,EAAMsR,qBAI9BtW,KAAKsH,KAAK9B,QAAQR,EAAMsC,YAKV,SAAjBtH,KAAKyW,UACAzW,KAAKyW,QAAQjR,QAAQR,EAAMyR,WACV,SAAlBzR,EAAMyR,aAKU,SAAfzW,KAAK0W,QACR1W,KAAK0W,MAAMlR,QAAQR,EAAM0R,SACT,SAAhB1R,EAAM0R;OAGZ5W,iBAAAA;QACE,OACEwI,EAAYuO,GAAc7W,KAAKsH,SACN,SAAzBtH,KAAKsW,mBACmB,MAAxBtW,KAAKwW,QAAQ7Q;;;;;;;ICzFjB7F,WACWwH,GACAgP,GACAQ,GACAN,GACAtQ,GACA6Q,gBACAN,GACAC;yBANAJ,4BACAQ,0BACAN;yBACAtQ,4BACA6Q,2BACAN;yBACAC,WAPA1W,YAAAsH,GACAtH,uBAAAsW,aACAQ;QACA9W,eAAAwW,GACAxW,aAAAkG,aACA6Q,GACA/W,eAAAyW,GACAzW,aAAA0W;QAjBX1W,UAA4C;;QAG5CA,UAAwC,MAgBlCA,KAAKyW,WACPzW,KAAKgX,GAAiBhX,KAAKyW,UAEzBzW,KAAK0W,SACP1W,KAAKgX,GAAiBhX,KAAK0W;;kBA3B/B5W,SAAcwH;QACZ,OAAO,IAAI2P,EAAM3P;OA8BnBiP;aAAAA;YACE,IAA6B,SAAzBvW,KAAKkX,IAA0B;gBACjClX,KAAKkX,KAAkB;gBAEvB,IAAMC,IAAkBnX,KAAKoX,MACvBC,IAAoBrX,KAAKsX;gBAC/B,IAAwB,SAApBH,KAAkD,SAAtBE;;;;gBAIzBF,EAAgBI,OACnBvX,KAAKkX,GAAgB7Q,KAAK,IAAImR,GAAQL,KAExCnX,KAAKkX,GAAgB7Q,KACnB,IAAImR,GAAQ7P,EAAU8P,mCAEnB;oBAQL,KADA,IAAIC,eACkB1X,IAAAA,KAAK8W,IAAL9W,cAAAA;wBAAjB,IAAMuW;wBACTvW,KAAKkX,GAAgB7Q,KAAKkQ,IACtBA,EAAQjF,MAAMiG,QAChBG;;oBAGJ,KAAKA,GAAkB;;;wBAGrB,IAAMC,IACJ3X,KAAK8W,GAAgBnR,SAAS,IAC1B3F,KAAK8W,GAAgB9W,KAAK8W,GAAgBnR,SAAS,GAAGiS;wBAE5D5X,KAAKkX,GAAgB7Q,KACnB,IA
AImR,GAAQ7P,EAAU8P,KAAYE;;;;YAK1C,OAAO3X,KAAKkX;;;;QAGdpX,iBAAAA,SAAU2H;QAcR,IAAMoQ,IAAa7X,KAAKwW,QAAQsB,OAAO,EAACrQ;QACxC,OAAO,IAAIwP,EACTjX,KAAKsH,MACLtH,KAAKsW,iBACLtW,KAAK8W,GAAgB7Q,SACrB4R,GACA7X,KAAKkG,OACLlG,KAAK+W,IACL/W,KAAKyW,SACLzW,KAAK0W;OAIT5W,iBAAAA,SAAWyW;;QAMT,IAAMwB,IAAa/X,KAAK8W,GAAgBgB,OAAO,EAACvB;QAChD,OAAO,IAAIU,EACTjX,KAAKsH,MACLtH,KAAKsW,iBACLyB,GACA/X,KAAKwW,QAAQvQ,SACbjG,KAAKkG,OACLlG,KAAK+W,IACL/W,KAAKyW,SACLzW,KAAK0W;OAIT5W,iBAAAA,SAAiBoG;QACf,OAAO,IAAI+Q,EACTjX,KAAKsH,MACLtH,KAAKsW,iBACLtW,KAAK8W,GAAgB7Q,SACrBjG,KAAKwW,QAAQvQ,SACbC,qBAEAlG,KAAKyW,SACLzW,KAAK0W;OAIT5W,iBAAAA,SAAgBoG;QACd,OAAO,IAAI+Q,EACTjX,KAAKsH,MACLtH,KAAKsW,iBACLtW,KAAK8W,GAAgB7Q,SACrBjG,KAAKwW,QAAQvQ,SACbC,oBAEAlG,KAAKyW,SACLzW,KAAK0W;OAIT5W,iBAAAA,SAAYkY;QACV,OAAO,IAAIf,EACTjX,KAAKsH,MACLtH,KAAKsW,iBACLtW,KAAK8W,GAAgB7Q,SACrBjG,KAAKwW,QAAQvQ,SACbjG,KAAKkG,OACLlG,KAAK+W,IACLiB,GACAhY,KAAK0W;OAIT5W,iBAAAA,SAAUkY;QACR,OAAO,IAAIf,EACTjX,KAAKsH,MACLtH,KAAKsW,iBACLtW,KAAK8W,GAAgB7Q,SACrBjG,KAAKwW,QAAQvQ,SACbjG,KAAKkG,OACLlG,KAAK+W,IACL/W,KAAKyW,SACLuB;;;;;;;;IAUJlY,iBAAAA,SAAwBwH;QACtB,OAAO,IAAI2P,EACT3P;6BACqB,MACrBtH,KAAK8W,GAAgB7Q,SACrBjG,KAAKwW,QAAQvQ,SACbjG,KAAKkG,OACLlG,KAAK+W,IACL/W,KAAKyW,SACLzW,KAAK0W;;;;;;IAQT5W,iBAAAA;QACE,OAC0B,MAAxBE,KAAKwW,QAAQ7Q,UACE,SAAf3F,KAAKkG,SACW,QAAhBlG,KAAKyW,WACS,QAAdzW,KAAK0W,UAC4B,MAAhC1W,KAAK8W,GAAgBnR,UACa,MAAhC3F,KAAK8W,GAAgBnR,UACpB3F,KAAK8W,GAAgB,GAAGxF,MAAMiG;;;;;IAOtCzX,0BAAAA;QACE,OAAUE,KAAKiY,KAAWhK,yBAAoBjO,KAAK+W;OAGrDjX,uBAAAA;QACE,OAAO,kBAAgBE,KAAKiY,KAAWxW,8BACrCzB,KAAK+W;OAITjX,sBAAAA,SAAQkF;QACN,OACEhF,KAAKiY,KAAWzS,QAAQR,EAAMiT,SAC9BjY,KAAK+W,OAAc/R,EAAM+R;OAI7BjX,iBAAAA,SAAcoY,GAAcC;QAE1B,KADA,IAAIC,eACkBpY,IAAAA,KAAKuW,SAALvW,cAAAA,KAAc;YAA/B,IAAMuW,UACH8B,IAAO9B,EAAQ5I,QAAQuK,GAAIC;YACjC,IAAa,MAATE,GACF,OAAOA;YAETD,IAAqBA,KAAsB7B,EAAQjF,MAAMiG;;QAO3D,OAAO;OAGTzX,sBAAAA,SAAQ4T;QACN,OACE1T,KAAKsY,GAA8B5E,MACnC1T,KAAKuY,GAAe7E,MACpB1T,KAAKwY,GAAe9E,MACpB1T,KAAKyY,GAAc/E;OAIvB5T,iBAAAA;QACE,QAAQmK,EAAkBjK,KAAKkG,8BAAUlG,KAAK+W;OAGhDjX,iBAAAA;QACE,QAAQmK,EAAkBjK,KAAKkG,6BAAUlG,KAAK+W;OAGhDjX,iBAAAA;QACE,OAAOE,KAAK8W,GAAgBnR,SAAS,IACjC3F,KAAK8W,GAAgB,GAAGxF,QACxB;OAGNxR,iBAAAA;QACE,KAAqBE,WAAAA,IAAAA,KAAKwW,SAALxW,cAAAA;YAAhB,IAAMyH;YACT,IAAIA,aAAkBiR,MAAejR,EAAOkR,MAC1C,OAAOlR,EAAO6J;;QAGlB,OAAO;;;;IAKTxR,iBAAAA,SAAmB8Y;QACjB,KAAqB5Y,WAAAA,IAAAA,KAAKwW,SAALxW,cAAAA;YAAhB,IAAMyH;YACT,IAAIA,aAAkBiR,MAChBE,EAAUrR,QAAQE,EAAOoR,OAAO,GAClC,OAAOpR,EAAOoR;;QAIpB,OAAO;OAGT/Y,iBAAAA;QACE,OAAOE,KAAKiY,KAAWa;OAGzBhZ,iBAAAA;QACE,OAAgC,SAAzBE,KAAKsW;;;;;;IAOdxW,iBAAAA;QACE,KAAKE,KAAK+Y,IACR,wBAAI/Y,KAAK+W,IACP/W,KAAK+Y,KAAiB,IAAIC,GACxBhZ,KAAKsH,MACLtH,KAAKsW,iBACLtW,KAAKuW,SACLvW,KAAKwW,SACLxW,KAAKkG,OACLlG,KAAKyW,SACLzW,KAAK0W,aAEF;YAGL;;YADA,IAAMuC,IAAW,WACKjZ,IAAAA,KAAKuW,SAALvW,cAAAA,KAAc;gBAA/B,IAAMuW,UACHqB,gCACJrB,EAAQqB;gBAGVqB,EAAS5S,KAAK,IAAImR,GAAQjB,EAAQjF,OAAOsG;;;wBAI3C,IAAMnB,IAAUzW,KAAK0W,QACjB,IAAIwC,GAAMlZ,KAAK0W,MAAMyC,WAAWnZ,KAAK0W,MAAM0C,UAC3C,MACE1C,IAAQ1W,KAAKyW,UACf,IAAIyC,GAAMlZ,KAAKyW,QAAQ0C,WAAWnZ,KAAKyW,QAAQ2C,UAC/C;;wBAGJpZ,KAAK+Y,KAAiB,IAAIC,GACxBhZ,KAAKsH,MACLtH,KAAKsW,iBACL2C,GACAjZ,KAAKwW,SACLxW,KAAKkG,OACLuQ,GACAC;;QAIN,OAAO1W,KAAK+Y;OAGNjZ,iBAAAA,SAA8B4T;QACpC,IAAM2F,IAAU3F,EAAI3K,IAAIzB;QACxB,OAA6B,SAAzBtH,KAAKsW,kBAIL5C,EAAI3K,IAAIuQ,GAAgBtZ,KAAKsW,oBAC7BtW,KAAKsH,KAAK6J,EAAWkI,KAEd/Q,EAAYuO,GAAc7W,KAAKsH,QAEjCtH,KAAKsH,KAAK9B,QAAQ6T,KAGlBrZ,KAAKsH,KAAKiS,EAAoBF;;;;;;IAQjCvZ,iBAAAA,SAAe4T;QACrB,KAAsB1T,WAAAA,IAAAA,KAAK8W,IAAL9W,cAAAA;YAAjB,IAAMuW;;wBAET,KAAKA,EAAQjF,MAAMiG,OAA6C,SAA7B7D,EAAIpC,MAAMiF,EAAQjF,QACnD;;QAG
J;OAGMxR,iBAAAA,SAAe4T;QACrB,KAAqB1T,WAAAA,IAAAA,KAAKwW,SAALxW,cAAAA;YACnB,UAAYwZ,QAAQ9F,IAClB;;QAGJ;;;;;IAMM5T,iBAAAA,SAAc4T;QACpB,SAAI1T,KAAKyW,YAAYzW,KAAKyW,QAAQgD,GAAoBzZ,KAAKuW,SAAS7C,MAGhE1T,KAAK0W,SAAS1W,KAAK0W,MAAM+C,GAAoBzZ,KAAKuW,SAAS7C;OAMzD5T,iBAAAA,SAAiBkY;;IA0BzBlY,WACSwR,GACAuH,GACAjX;QAHT9B;gBAKE0B,IAAAA,8BAJO8P,GACAtR,OAAA6Y,GACA7Y,UAAA4B;;;;sCAQT9B,SAAcwR,GAAkBuH,GAAcjX;QAC5C,IAAI0P,EAAMiG,KACR,yBAAIsB,IASK,IAAIa,GAAiBpI,GAAO1P,KAU5B,IAAI+X,GAAerI,GAAOuH,GAAIjX;QAElC,IAAI6N,GAAY7N,IAAQ;YAC7B,yBAAIiX,GACF,MAAM,IAAIzV,EACRhD,EAAKI,kBACL;YAGJ,OAAO,IAAIkY,EAAYpH,GAAOuH,GAAIjX;;QAC7B,IAAI8N,GAAW9N,IAAQ;YAC5B,yBAAIiX,GACF,MAAM,IAAIzV,EACRhD,EAAKI,kBACL;YAGJ,OAAO,IAAIkY,EAAYpH,GAAOuH,GAAIjX;;QAC7B,iDAAIiX,IACF,IAAIe,GAAoBtI,GAAO1P,uBAC7BiX,IAKF,IAAIgB,GAASvI,GAAO1P,uDAClBiX,IAKF,IAAIiB,GAAuBxI,GAAO1P,KAElC,IAAI8W,EAAYpH,GAAOuH,GAAIjX;OAItC9B,sBAAAA,SAAQ4T;QACN,IAAM1O,IAAQ0O,EAAIpC,MAAMtR,KAAKsR;;gBAG7B,OACY,SAAVtM,KACAgG,EAAUhL,KAAK4B,WAAWoJ,EAAUhG,MACpChF,KAAK+Z,GAAkBnN,EAAa5H,GAAOhF,KAAK4B;OAI1C9B,iBAAAA,SAAkB0N;QAC1B,QAAQxN,KAAK6Y;UACX;YACE,OAAOrL,IAAa;;UACtB;YACE,OAAOA,KAAc;;UACvB;YACE,OAAsB,MAAfA;;UACT;YACE,OAAOA,IAAa;;UACtB;YACE,OAAOA,KAAc;;UACvB;YACE,OA7hBD5H;;OAiiBL9F,iBAAAA;QACE,OACE,oHAKEyH,QAAQvH,KAAK6Y,OAAO;OAI1B/Y,0BAAAA;;;;QAIE,OACEE,KAAKsR,MAAMjK,MACXrH,KAAK6Y,GAAGpX,aACRwM,EAAYjO,KAAK4B;OAIrB9B,sBAAAA,SAAQkF;QACN,OAAIA,aAAiB0T,KAEjB1Y,KAAK6Y,OAAO7T,EAAM6T,MAClB7Y,KAAKsR,MAAM9L,QAAQR,EAAMsM,UACzBrG,EAAYjL,KAAK4B,OAAOoD,EAAMpD;OAOpC9B,uBAAAA;QACE,OAAUE,KAAKsR,MAAMjK,YAAqBrH,KAAK6Y,WAAM5K,EACnDjO,KAAK4B;;;IAST9B,WAAYwR,GAAkBuH,GAAcjX;QAA5C9B;gBACE0B,IAAAA,aAAM8P,GAAOuH,GAAIjX,YAKZmH,MAAMT,EAAY+F,EAASzM,EAAM4J;;WATNkN,SAYlC5Y,sBAAAA,SAAQ4T;QACN,IAAMlG,IAAalF,EAAYvC,EAAW2N,EAAI3K,KAAK/I,KAAK+I;QACxD,OAAO/I,KAAK+Z,GAAkBvM;;EAdEkL;IAsBlC5Y,WAAYwR,GAAkB1P;QAA9B9B;gBACE0B,IAAAA,aAAM8P,mBAAoB1P,YAErBiM,QAAQjM,EAAMuK,WAAWC,UAAU,IAAIvE,KAAI8E,SAAAA;YAKvCrE,OAAAA,EAAY+F,EAAS1B,EAAEnB;;;WAXEkN,SAepC5Y,sBAAAA,SAAQ4T;QACN,OAAO1T,KAAK6N,KAAKuC,MAAKrH,SAAAA;YAAOA,OAAAA,EAAIvD,QAAQkO,EAAI3K;;;EAhBX2P;IAsBpC5Y,WAAYwR,GAAkB1P;eAC5BJ,aAAM8P,2CAAgC1P;;WAFD8W,SAKvC5Y,sBAAAA,SAAQ4T;QACN,IAAM1O,IAAQ0O,EAAIpC,MAAMtR,KAAKsR;QAC7B,OAAO9B,GAAQxK,MAAUuH,EAAmBvH,EAAMmH,YAAYnM,KAAK4B;;EAP9B8W;IAavC5Y,WAAYwR,GAAkB1P;eAC5BJ,aAAM8P,mBAAoB1P;;WAFA8W,SAM5B5Y,sBAAAA,SAAQ4T;QACN,IAAM1O,IAAQ0O,EAAIpC,MAAMtR,KAAKsR;QAC7B,OAAiB,SAAVtM,KAAkBuH,EAAmBvM,KAAK4B,MAAiBuK,YAAEnH;;EAR1C0T;IAc5B5Y,WAAYwR,GAAkB1P;eAC5BJ,aAAM8P,mDAAoC1P;;WAFF8W,SAM1C5Y,sBAAAA,SAAQ4T;QAAR5T,cACQkF,IAAQ0O,EAAIpC,MAAMtR,KAAKsR;QAC7B,UAAK9B,GAAQxK,OAAWA,EAAMmH,WAAWC,WAGlCpH,EAAMmH,WAAWC,OAAOgE,MAAK4J,SAAAA;YAClCzN,OAAAA,EAAmBvM,EAAK4B,MAAiBuK,YAAE6N;;;EAZLtB;IAwC1C5Y,WAAqBqZ,GAAgCC;QAAhCpZ,gBAAAmZ,GAAgCnZ,cAAAoZ;;WAErDtZ,0BAAAA;;QAEE,QAAUE,KAAKoZ,SAAS,MAAM,aAAOpZ,KAAKmZ,SACvCtR,KAAIoS,SAAAA;YAAKhM,OAAAA,EAAYgM;YACrB7S,KAAK;;;;;;IAOVtH,iBAAAA,SAAoByW,GAAoB7C;QAMtC,KADA,IAAIlG,IAAa,GACR/G,IAAI,GAAGA,IAAIzG,KAAKmZ,SAASxT,QAAQc,KAAK;YAC7C,IAAMyT,IAAmB3D,EAAQ9P,IAC3B0T,IAAYna,KAAKmZ,SAAS1S;YAqBhC,IAfE+G,IALE0M,EAAiB5I,MAAMiG,MAKZjP,EAAYvC,EACvBuC,EAAY+F,EAAS8L,EAAU3O,iBAC/BkI,EAAI3K,OAQO6D,EAAauN,GALTzG,EAAIpC,MAAM4I,EAAiB5I;wCAO1C4I,EAAiBtC,QACnBpK,MAA2B,IAEV,MAAfA,GACF;;QAGJ,OAAOxN,KAAKoZ,SAAS5L,KAAc,IAAIA,IAAa;OAGtD1N,sBAAAA,SAAQkF;QACN,IAAc,SAAVA,GACF;QAEF,IACEhF,KAAKoZ,WAAWpU,EAAMoU,UACtBpZ,KAAKmZ,SAASxT,WAAWX,EAAMmU,SAASxT,QAExC;QAEF,KAAK,IAAIc,IAAI,GAAGA,IAAIzG,KAAKmZ,SAASxT,QAAQc,KAGxC,KAAKwE,EAFgBjL,KAAKmZ,SAAS1S,IACbzB,EAAMmU,SAAS1S,KAEnC;QAGJ;;;IAWF3G,WAAqBwR,GAAkBsG;QAAlB5X,aAAAsR,cACfsG,MACFA,4BAEF5X,KAAK4X,MAAMA,GACX5X,KAAKoa,KA
Ae9I,EAAMiG;;WAG5BzX,sBAAAA,SAAQoY,GAAcC;QACpB,IAAM3K,IAAaxN,KAAKoa,KACpB9R,EAAYvC,EAAWmS,EAAGnP,KAAKoP,EAAGpP,gBF1rBxCuI,GACA4G,GACAC;YAEA,IAAMkC,IAAKnC,EAAG5G,MAAMA,IACdgJ,IAAKnC,EAAG7G,MAAMA;YACpB,OAAW,SAAP+I,KAAsB,SAAPC,IACV1N,EAAayN,GAAIC,KA5FnB1U;UEgxBuB5F,KAAKsR,OAAO4G,GAAIC;QAC5C,QAAQnY,KAAK4X;UACX;YACE,OAAOpK;;UACT;YACE,QAAQ,IAAIA;;UACd;YACE,OAnxBD5H;;OAuxBL9F,0BAAAA;;QAEE,OAAOE,KAAKsR,MAAMjK,MAAoBrH,KAAK4X,IAAInW;OAGjD3B,uBAAAA;QACE,OAAUE,KAAKsR,MAAMjK,aAAsBrH,KAAK4X;OAGlD9X,sBAAAA,SAAQkF;QACN,OAAOhF,KAAK4X,QAAQ5S,EAAM4S,OAAO5X,KAAKsR,MAAM9L,QAAQR,EAAMsM;;;ICxxB5DxR;;IAEWya;;;;;IAKAC;;IAEAC;;;;;IAKAC;;IAEAC;;;;UAKAC;;;;;;UAOAC;yBAZAF,IAAmCrV,EAAgByB,yBAKnD6T,IAAgDtV,EAAgByB,yBAOhE8T,IAA0BtR,EAAWuR;QA1BrC9a,cAAAua,GAKAva,gBAAAwa,aAEAC,GAKAza,sBAAA0a,aAEAC;QAKA3a,oCAAA4a,GAOA5a,mBAAA6a;;yFAIX/a,iBAAAA,SAAmB4a;QACjB,OAAO,IAAIK,EACT/a,KAAKua,QACLva,KAAKwa,UACLxa,KAAKya,IACLC,GACA1a,KAAK2a,IACL3a,KAAK4a,8BACL5a,KAAK6a;;;;;;IAQT/a,iBAAAA,SACE+a,GACAF;QAEA,OAAO,IAAII,EACT/a,KAAKua,QACLva,KAAKwa,UACLxa,KAAKya,IACLza,KAAK0a,gBACLC,GACA3a,KAAK4a,8BACLC;;;;;;IAQJ/a,iBAAAA,SACE8a;QAEA,OAAO,IAAIG,EACT/a,KAAKua,QACLva,KAAKwa,UACLxa,KAAKya,IACLza,KAAK0a,gBACL1a,KAAK2a,IACLC,GACA5a,KAAK6a;;;;AClGT/a,SAAmBgJ;IAAA9I,aAAA8I;;;;;;;;;;;;;SLsCLkS,GAAiB1Z;IAC/B,QAAQA;MACN,KAAKlB,EAAKC;QACR,OAnCwFuF;;MAoC1F,KAAKxF,EAAKE;MACV,KAAKF,EAAKG;MACV,KAAKH,EAAKK;MACV,KAAKL,EAAKU;MACV,KAAKV,EAAKe;MACV,KAAKf,EAAKgB;;;cAGV,KAAKhB,EAAKS;QACR;;MACF,KAAKT,EAAKI;MACV,KAAKJ,EAAKM;MACV,KAAKN,EAAKO;MACV,KAAKP,EAAKQ;MACV,KAAKR,EAAKW;;;;cAIV,KAAKX,EAAKY;MACV,KAAKZ,EAAKa;MACV,KAAKb,EAAKc;MACV,KAAKd,EAAKiB;QACR;;MACF;QACE,OA5DwFuE;;;;;;;;;;;;;;;;;;;;;;aAwG9EqV,GAAmB3Z;IACjC,eAAIA;;;IAIF,OADA4Z,GAAS,4BACF9a,EAAKG;IAGd,QAAQe;MACN,KAAK0U,GAAQ3V;QACX,OAAOD,EAAKC;;MACd,KAAK2V,GAAQ1V;QACX,OAAOF,EAAKE;;MACd,KAAK0V,GAAQzV;QACX,OAAOH,EAAKG;;MACd,KAAKyV,GAAQvV;QACX,OAAOL,EAAKK;;MACd,KAAKuV,GAAQlV;QACX,OAAOV,EAAKU;;MACd,KAAKkV,GAAQ7U;QACX,OAAOf,EAAKe;;MACd,KAAK6U,GAAQ5U;QACX,OAAOhB,EAAKgB;;MACd,KAAK4U,GAAQnV;QACX,OAAOT,EAAKS;;MACd,KAAKmV,GAAQxV;QACX,OAAOJ,EAAKI;;MACd,KAAKwV,GAAQtV;QACX,OAAON,EAAKM;;MACd,KAAKsV,GAAQrV;QACX,OAAOP,EAAKO;;MACd,KAAKqV,GAAQpV;QACX,OAAOR,EAAKQ;;MACd,KAAKoV,GAAQjV;QACX,OAAOX,EAAKW;;MACd,KAAKiV,GAAQhV;QACX,OAAOZ,EAAKY;;MACd,KAAKgV,GAAQ/U;QACX,OAAOb,EAAKa;;MACd,KAAK+U,GAAQ9U;QACX,OAAOd,EAAKc;;MACd,KAAK8U,GAAQ3U;QACX,OAAOjB,EAAKiB;;MACd;QACE,OApJwFuE;;;;;;;;;;;UAMzFoQ,OAAAA,6BAEHmF;AACAA,gCACAA;AACAA,oDACAA;AACAA,8CACAA;AACAA,iDACAA;AACAA,wDACAA;AACAA,2CACAA;AACAA,mCACAA,yCACAA;;;;;;;;;;;;;;;;;;;;;IMFArb,WACSiG,GACPqV;iBADOrV,GAGP/F,KAAKob,OAAOA,KAAcC,GAASC;;;eAIrCxb,iBAAAA,SAAOiJ,GAAQnH;QACb,OAAO,IAAI2Z,EACTvb,KAAK+F,GACL/F,KAAKob,KACFI,GAAOzS,GAAKnH,GAAO5B,KAAK+F,GACxB0V,GAAK,MAAM,MAAMJ,GAASK,IAAO,MAAM;;;IAK9C5b,qBAAAA,SAAOiJ;QACL,OAAO,IAAIwS,EACTvb,KAAK+F,GACL/F,KAAKob,KACFO,OAAO5S,GAAK/I,KAAK+F,GACjB0V,GAAK,MAAM,MAAMJ,GAASK,IAAO,MAAM;;;IAK9C5b,kBAAAA,SAAIiJ;QAEF,KADA,IAAI6S,IAAO5b,KAAKob,OACRQ,EAAKxS,OAAW;YACtB,IAAMyS,IAAM7b,KAAK+F,EAAWgD,GAAK6S,EAAK7S;YACtC,IAAY,MAAR8S,GACF,OAAOD,EAAKha;YACHia,IAAM,IACfD,IAAOA,EAAK5U,OACH6U,IAAM,MACfD,IAAOA,EAAK3U;;QAGhB,OAAO;;;;IAKTnH,sBAAAA,SAAQiJ;QAIN;;QAFA,IAAI+S,IAAc,GACdF,IAAO5b,KAAKob,OACRQ,EAAKxS,OAAW;YACtB,IAAMyS,IAAM7b,KAAK+F,EAAWgD,GAAK6S,EAAK7S;YACtC,IAAY,MAAR8S,GACF,OAAOC,IAAcF,EAAK5U,KAAKT;YACtBsV,IAAM,IACfD,IAAOA,EAAK5U;;YAGZ8U,KAAeF,EAAK5U,KAAKT,OAAO,GAChCqV,IAAOA,EAAK3U;;;gBAIhB,QAAQ;OAGVnH,gBAAAA;QACE,OAAOE,KAAKob,KAAKhS;OAInB7C;;aAAAA;YACE,OAAOvG,KAAKob,KAAK7U;;;;;;IAInBzG,iBAAAA;QACE,OAAOE,KAAKob,KAAKW;;;IAInBjc,iBAAAA;QACE,OAAOE,KAAKob,KAAKY;;;;;;IAOnBlc,i
BAAAA,SAAoBmc;QAClB,OAAQjc,KAAKob,KAAwBc,GAAiBD;OAGxDnc,sBAAAA,SAAQ6G;QACN3G,KAAKkc,IAAiB,SAACC,GAAGxP;mBACxBhG,EAAGwV,GAAGxP;;OAKV7M,uBAAAA;QACE,IAAMsc,IAAyB;QAK/B,OAJApc,KAAKkc,IAAiB,SAACC,GAAGxP;mBACxByP,EAAa/V,KAAQ8V,UAAKxP;aAGrB,MAAIyP,EAAahV,KAAK;;;;;;;IAQ/BtH,iBAAAA,SAAoBmc;QAClB,OAAQjc,KAAKob,KAAwBiB,GAAiBJ;;;IAIxDnc,iBAAAA;QACE,OAAO,IAAIwc,GAAwBtc,KAAKob,MAAM,MAAMpb,KAAK+F;OAG3DjG,iBAAAA,SAAgBiJ;QACd,OAAO,IAAIuT,GAAwBtc,KAAKob,MAAMrS,GAAK/I,KAAK+F;OAG1DjG,iBAAAA;QACE,OAAO,IAAIwc,GAAwBtc,KAAKob,MAAM,MAAMpb,KAAK+F;OAG3DjG,iBAAAA,SAAuBiJ;QACrB,OAAO,IAAIuT,GAAwBtc,KAAKob,MAAMrS,GAAK/I,KAAK+F;;;IAS1DjG,WACE8b,GACAW,GACAxW,GACAyW;QAEAxc,KAAKwc,KAAYA,GACjBxc,KAAKyc,KAAY;QAGjB,KADA,IAAIZ,IAAM,IACFD,EAAKxS,OAOX,IANAyS,IAAMU,IAAWxW,EAAW6V,EAAK7S,KAAKwT,KAAY;;QAE9CC,MACFX,MAAQ,IAGNA,IAAM;;QAGND,IADE5b,KAAKwc,KACAZ,EAAK5U,OAEL4U,EAAK3U,YAET;YAAA,IAAY,MAAR4U,GAAW;;;gBAGpB7b,KAAKyc,GAAUpW,KAAKuV;gBACpB;;;;wBAIA5b,KAAKyc,GAAUpW,KAAKuV,IAElBA,IADE5b,KAAKwc,KACAZ,EAAK3U,QAEL2U,EAAK5U;;;WAMpBlH,iBAAAA;QAME,IAAI8b,IAAO5b,KAAKyc,GAAUC,OACpBnO,IAAS;YAAExF,KAAK6S,EAAK7S;YAAKnH,OAAOga,EAAKha;;QAE5C,IAAI5B,KAAKwc,IAEP,KADAZ,IAAOA,EAAK5U,OACJ4U,EAAKxS,OACXpJ,KAAKyc,GAAUpW,KAAKuV,IACpBA,IAAOA,EAAK3U,YAId,KADA2U,IAAOA,EAAK3U,QACJ2U,EAAKxS,OACXpJ,KAAKyc,GAAUpW,KAAKuV;QACpBA,IAAOA,EAAK5U;QAIhB,OAAOuH;OAGTzO,iBAAAA;QACE,OAAOE,KAAKyc,GAAU9W,SAAS;OAGjC7F,iBAAAA;QACE,IAA8B,MAA1BE,KAAKyc,GAAU9W,QACjB,OAAO;QAGT,IAAMiW,IAAO5b,KAAKyc,GAAUzc,KAAKyc,GAAU9W,SAAS;QACpD,OAAO;YAAEoD,KAAK6S,EAAK7S;YAAKnH,OAAOga,EAAKha;;;;IAkBtC9B,WACSiJ,GACAnH,GACP+a,GACA3V,GACAC;QAJOjH,WAAA+I,GACA/I,aAAA4B,GAKP5B,KAAK2c,QAAiB,QAATA,IAAgBA,IAAQtB,EAASuB,KAC9C5c,KAAKgH,OAAe,QAARA,IAAeA,IAAOqU,EAASC;QAC3Ctb,KAAKiH,QAAiB,QAATA,IAAgBA,IAAQoU,EAASC,OAC9Ctb,KAAKuG,OAAOvG,KAAKgH,KAAKT,OAAO,IAAIvG,KAAKiH,MAAMV;;;eAI9CzG,iBAAAA,SACEiJ,GACAnH,GACA+a,GACA3V,GACAC;QAEA,OAAO,IAAIoU,EACF,QAAPtS,IAAcA,IAAM/I,KAAK+I,KAChB,QAATnH,IAAgBA,IAAQ5B,KAAK4B,OACpB,QAAT+a,IAAgBA,IAAQ3c,KAAK2c,OACrB,QAAR3V,IAAeA,IAAOhH,KAAKgH,MAClB,QAATC,IAAgBA,IAAQjH,KAAKiH;OAIjCnH,gBAAAA;QACE;;;;;;IAOFA,iBAAAA,SAAoBmc;QAClB,OACGjc,KAAKgH,KAAwBkV,GAAiBD,MAC/CA,EAAOjc,KAAK+I,KAAK/I,KAAK4B,UACrB5B,KAAKiH,MAAyBiV,GAAiBD;;;;;;IAQpDnc,iBAAAA,SAAoBmc;QAClB,OACGjc,KAAKiH,MAAyBoV,GAAiBJ,MAChDA,EAAOjc,KAAK+I,KAAK/I,KAAK4B,UACrB5B,KAAKgH,KAAwBqV,GAAiBJ;;;IAK3Cnc,kBAAAA;QACN,OAAIE,KAAKgH,KAAKoC,MACLpJ,OAECA,KAAKgH,KAAwBD;;;IAKzCjH,iBAAAA;QACE,OAAOE,KAAK+G,MAAMgC;;;IAIpBjJ,iBAAAA;QACE,OAAIE,KAAKiH,MAAMmC,MACNpJ,KAAK+I,MAEL/I,KAAKiH,MAAM+U;;;IAKtBlc,iBAAAA,SAAOiJ,GAAQnH,GAAUmE;QACvB,IAAI8W,IAAoB7c,MAClB6b,IAAM9V,EAAWgD,GAAK8T,EAAE9T;QAc9B,QAZE8T,IADEhB,IAAM,IACJgB,EAAEpB,GAAK,MAAM,MAAM,MAAMoB,EAAE7V,KAAKwU,GAAOzS,GAAKnH,GAAOmE,IAAa,QACnD,MAAR8V,IACLgB,EAAEpB,GAAK,MAAM7Z,GAAO,MAAM,MAAM,QAEhCib,EAAEpB,GACJ,MACA,MACA,MACA,MACAoB,EAAE5V,MAAMuU,GAAOzS,GAAKnH,GAAOmE,KAGtB+W;OAGHhd,iBAAAA;QACN,IAAIE,KAAKgH,KAAKoC,KACZ,OAAOiS,EAASC;QAElB,IAAIuB,IAAoB7c;QAKxB,OAJK6c,EAAE7V,KAAK+V,QAAYF,EAAE7V,KAAKA,KAAK+V,SAClCF,IAAIA,EAAEG,QAERH,IAAIA,EAAEpB,GAAK,MAAM,MAAM,MAAOoB,EAAE7V,KAAwBiW,MAAa,OAC5DH;;;IAIXhd,qBAAAA,SACEiJ,GACAhD;QAEA,IAAImX,GACAL,IAAoB7c;QACxB,IAAI+F,EAAWgD,GAAK8T,EAAE9T,OAAO,GACtB8T,EAAE7V,KAAKoC,OAAcyT,EAAE7V,KAAK+V,QAAYF,EAAE7V,KAAKA,KAAK+V,SACvDF,IAAIA,EAAEG;QAERH,IAAIA,EAAEpB,GAAK,MAAM,MAAM,MAAMoB,EAAE7V,KAAK2U,OAAO5S,GAAKhD,IAAa,YACxD;YAOL,IANI8W,EAAE7V,KAAK+V,SACTF,IAAIA,EAAEM,OAEHN,EAAE5V,MAAMmC,OAAcyT,EAAE5V,MAAM8V,QAAYF,EAAE5V,MAAMD,KAAK+V,SAC1DF,IAAIA,EAAEO;YAEuB,MAA3BrX,EAAWgD,GAAK8T,EAAE9T,MAAY;gBAChC,IAAI8T,EAAE5V,MAAMmC,KACV,OAAOiS,EAASC;gBAEhB4B,IAAYL,EAAE5V,MAAyBF,OACvC8V,IAAIA,EA
AEpB,GACJyB,EAASnU,KACTmU,EAAStb,OACT,MACA,MACCib,EAAE5V,MAAyBgW;;YAIlCJ,IAAIA,EAAEpB,GAAK,MAAM,MAAM,MAAM,MAAMoB,EAAE5V,MAAM0U,OAAO5S,GAAKhD;;QAEzD,OAAO8W,EAAEC;OAGXhd,iBAAAA;QACE,OAAOE,KAAK2c;;;IAIN7c,iBAAAA;QACN,IAAI+c,IAAoB7c;QAUxB,OATI6c,EAAE5V,MAAM8V,SAAYF,EAAE7V,KAAK+V,SAC7BF,IAAIA,EAAEQ,OAEJR,EAAE7V,KAAK+V,QAAWF,EAAE7V,KAAKA,KAAK+V,SAChCF,IAAIA,EAAEM;QAEJN,EAAE7V,KAAK+V,QAAWF,EAAE5V,MAAM8V,SAC5BF,IAAIA,EAAES,OAEDT;OAGD/c,iBAAAA;QACN,IAAI+c,IAAI7c,KAAKsd;QAYb,OAXIT,EAAE5V,MAAMD,KAAK+V,SASfF,KADAA,KAPAA,IAAIA,EAAEpB,GACJ,MACA,MACA,MACA,MACCoB,EAAE5V,MAAyBkW,OAExBE,MACAC;QAEDT;OAGD/c,iBAAAA;QACN,IAAI+c,IAAI7c,KAAKsd;QAKb,OAJIT,EAAE7V,KAAKA,KAAK+V,SAEdF,KADAA,IAAIA,EAAEM,MACAG,OAEDT;OAGD/c,iBAAAA;QACN,IAAMyd,IAAKvd,KAAKyb,GAAK,MAAM,MAAMJ,EAASuB,KAAK,MAAM5c,KAAKiH,MAAMD;QAChE,OAAQhH,KAAKiH,MAAyBwU,GACpC,MACA,MACAzb,KAAK2c,OACLY,GACA;OAIIzd,iBAAAA;QACN,IAAM0d,IAAKxd,KAAKyb,GAAK,MAAM,MAAMJ,EAASuB,KAAK5c,KAAKgH,KAAKC,OAAO;QAChE,OAAQjH,KAAKgH,KAAwByU,GAAK,MAAM,MAAMzb,KAAK2c,OAAO,MAAMa;OAGlE1d,iBAAAA;QACN,IAAMkH,IAAOhH,KAAKgH,KAAKyU,GAAK,MAAM,OAAOzb,KAAKgH,KAAK2V,OAAO,MAAM,OAC1D1V,IAAQjH,KAAKiH,MAAMwU,GAAK,MAAM,OAAOzb,KAAKiH,MAAM0V,OAAO,MAAM;QACnE,OAAO3c,KAAKyb,GAAK,MAAM,OAAOzb,KAAK2c,OAAO3V,GAAMC;;;IAIlDnH,iBAAAA;QACE,IAAM2d,IAAazd,KAAK0d;QACxB,OAAI7Y,KAAK8Y,IAAI,GAAKF,MAAezd,KAAKuG,OAAO;;;;IASrCzG,iBAAAA;QACR,IAAIE,KAAK+c,QAAW/c,KAAKgH,KAAK+V,MAC5B,MAveenX;QAyejB,IAAI5F,KAAKiH,MAAM8V,MACb,MA1eenX;QA4ejB,IAAM6X,IAAczd,KAAKgH,KAAwB0W;QACjD,IAAID,MAAgBzd,KAAKiH,MAAyByW,MAChD,MA9ee9X;QAgff,OAAO6X,KAAczd,KAAK+c,OAAU,IAAI;;;;;;;;;WArPJ,MAEjC1B,aACAA;;AAiUTA,GAASC,QAAQ;IAzEjBxb;QAgBEE,YAAO;;WAfP+I;aAAAA;YACE,MAxfiBnD;;;;QA0fnBhE;aAAAA;YACE,MA3fiBgE;;;;QA6fnB+W;aAAAA;YACE,MA9fiB/W;;;;QAggBnBoB;aAAAA;YACE,MAjgBiBpB;;;;QAmgBnBqB;aAAAA;YACE,MApgBiBrB;;;;;;IAygBnB9F,iBAAAA,SACEiJ,GACAnH,GACA+a,GACA3V,GACAC;QAEA,OAAOjH;;;IAITF,iBAAAA,SAAOiJ,GAAQnH,GAAUmE;QACvB,OAAO,IAAIsV,GAAetS,GAAKnH;;;IAIjC9B,qBAAAA,SAAOiJ,GAAQhD;QACb,OAAO/F;OAGTF,gBAAAA;QACE;OAGFA,iBAAAA,SAAiBmc;QACf;OAGFnc,iBAAAA,SAAiBmc;QACf;OAGFnc,iBAAAA;QACE,OAAO;OAGTA,iBAAAA;QACE,OAAO;OAGTA,iBAAAA;QACE;;;IAIFA,iBAAAA;QACE;OAGQA,iBAAAA;QACR,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;ICxjBTA,WAAoBiG;iBAAAA,GAClB/F,KAAK0S,OAAO,IAAI6I,GAAsBvb,KAAK+F;;WAG7CjG,kBAAAA,SAAI8d;QACF,OAA+B,SAAxB5d,KAAK0S,KAAK5P,IAAI8a;OAGvB9d,oBAAAA;QACE,OAAOE,KAAK0S,KAAKqJ;OAGnBjc,mBAAAA;QACE,OAAOE,KAAK0S,KAAKsJ;OAGnBzV;aAAAA;YACE,OAAOvG,KAAK0S,KAAKnM;;;;QAGnBzG,sBAAAA,SAAQ8d;QACN,OAAO5d,KAAK0S,KAAKnL,QAAQqW;;8DAI3B9d,sBAAAA,SAAQ+d;QACN7d,KAAK0S,KAAKwJ,IAAiB,SAACC,GAAMxP;mBAChCkR,EAAG1B;;;yEAMPrc,iBAAAA,SAAege,GAAeD;QAE5B,KADA,IAAME,IAAO/d,KAAK0S,KAAKsL,GAAgBF,EAAM,KACtCC,EAAKE,QAAW;YACrB,IAAML,IAAOG,EAAKG;YAClB,IAAIle,KAAK+F,EAAW6X,EAAK7U,KAAK+U,EAAM,OAAO,GACzC;YAEFD,EAAGD,EAAK7U;;;;;;IAOZjJ,iBAAAA,SAAa+d,GAA0BM;QACrC,IAAIJ;QAMJ,KAJEA,eADEI,IACKne,KAAK0S,KAAKsL,GAAgBG,KAE1Bne,KAAK0S,KAAK0L,MAEZL,EAAKE,QAGV,KADeJ,EADFE,EAAKG,KACKnV,MAErB;;oEAMNjJ,iBAAAA,SAAkB8d;QAChB,IAAMG,IAAO/d,KAAK0S,KAAKsL,GAAgBJ;QACvC,OAAOG,EAAKE,OAAYF,EAAKG,KAAUnV,MAAM;OAG/CjJ,iBAAAA;QACE,OAAO,IAAIue,GAAqBre,KAAK0S,KAAK0L;OAG5Cte,iBAAAA,SAAgBiJ;QACd,OAAO,IAAIsV,GAAqBre,KAAK0S,KAAKsL,GAAgBjV;;yCAI5DjJ,kBAAAA,SAAI8d;QACF,OAAO5d,KAAKyb,GAAKzb,KAAK0S,KAAKiJ,OAAOiC,GAAMpC,GAAOoC;;8BAIjD9d,qBAAAA,SAAO8d;QACL,OAAK5d,KAAKse,IAAIV,KAGP5d,KAAKyb,GAAKzb,KAAK0S,KAAKiJ,OAAOiC,MAFzB5d;OAKXF,gBAAAA;QACE,OAAOE,KAAK0S,KAAKtJ;OAGnBtJ,iBAAAA,SAAUkF;QACR,IAAIuJ,IAAuBvO;;gBAW3B,OARIuO,EAAOhI,OAAOvB,EAAMuB,SACtBgI,IAASvJ,GACTA,IAAQhF,OAGVgF,EAAMmB,SAAQyX,SAAAA;YACZrP,IAASA,EAAOgQ,IAAIX;aAEfrP;OAGTzO,sBAAAA,SAAQkF;QACN,MAAMA,aAAiBwZ
,IACrB;QAEF,IAAIxe,KAAKuG,SAASvB,EAAMuB,MACtB;QAKF,KAFA,IAAMkY,IAASze,KAAK0S,KAAK0L,MACnBM,IAAU1Z,EAAM0N,KAAK0L,MACpBK,EAAOR,QAAW;YACvB,IAAMU,IAAWF,EAAOP,KAAUnV,KAC5B6V,IAAYF,EAAQR,KAAUnV;YACpC,IAA6C,MAAzC/I,KAAK+F,EAAW4Y,GAAUC,IAC5B;;QAGJ;OAGF9e,gBAAAA;QACE,IAAM+e,IAAW;QAIjB,OAHA7e,KAAKmG,SAAQqU,SAAAA;YACXqE,EAAIxY,KAAKmU;aAEJqE;OAGT/e,uBAAAA;QACE,IAAMyO,IAAc;QAEpB,OADAvO,KAAKmG,SAAQyX,SAAAA;YAAQrP,OAAAA,EAAOlI,KAAKuX;aAC1B,eAAerP,EAAO9M,aAAa;OAGpC3B,iBAAAA,SAAK4S;QACX,IAAMnE,IAAS,IAAIiQ,EAAUxe,KAAK+F;QAElC,OADAwI,EAAOmE,OAAOA,GACPnE;;;IAKTzO,WAAoBie;kBAAAA;;WAEpBje,iBAAAA;QACE,OAAOE,KAAK+d,GAAKG,KAAUnV;OAG7BjJ,iBAAAA;QACE,OAAOE,KAAK+d,GAAKE;;KC1Jfa,KAA2B,IAAIvD,GACnCjT,EAAYvC;;SAEEgZ;IACd,OAAOD;;;SAQOE;IACd,OAAOD;;;AAST,IAAME,KAAqB,IAAI1D,GAC7BjT,EAAYvC;;SAEEmZ;IACd,OAAOD;;;AAIT,IAAME,KAA6B,IAAI5D,GACrCjT,EAAYvC;;SAEEqZ;IACd,OAAOD;;;AAIT,IAAME,KAAyB,IAAIb,GAAUlW,EAAYvC;;SACzCuZ;;IAEd,KADA,IAAIhM,IAAM+L,WACQxR,OAAAA,cAAAA;QAAb,IAAM9E;QACTuK,IAAMA,EAAIiL,IAAIxV;;IAEhB,OAAOuK;;;AAIT,IAAMiM,KAAsB,IAAIf,GAAoBvZ;;SACpCua;IACd,OAAOD;;;;;;;;;;;;;;;;;;;;;;;;;;ICtCPzf,WAAYuY;;;QAIRrY,KAAK+F,IADHsS,IACgB,SAACH,GAAcC;YAC/BE,OAAAA,EAAKH,GAAIC,MAAO7P,EAAYvC,EAAWmS,EAAGnP,KAAKoP,EAAGpP;YAElC,SAACmP,GAAcC;YAC/B7P,OAAAA,EAAYvC,EAAWmS,EAAGnP,KAAKoP,EAAGpP;WAGtC/I,KAAKyf,KAAWP,MAChBlf,KAAK0f,KAAY,IAAInE,GAA0Bvb,KAAK+F;;;;;;kBArBtDjG,SAAgB6f;QACd,OAAO,IAAIC,EAAYD,EAAO5Z;OAuBhCjG,kBAAAA,SAAIiJ;QACF,OAAiC,QAA1B/I,KAAKyf,GAAS3c,IAAIiG;OAG3BjJ,kBAAAA,SAAIiJ;QACF,OAAO/I,KAAKyf,GAAS3c,IAAIiG;OAG3BjJ,oBAAAA;QACE,OAAOE,KAAK0f,GAAU3D;OAGxBjc,mBAAAA;QACE,OAAOE,KAAK0f,GAAU1D;OAGxBlc,gBAAAA;QACE,OAAOE,KAAK0f,GAAUtW;;;;;;IAOxBtJ,sBAAAA,SAAQiJ;QACN,IAAM2K,IAAM1T,KAAKyf,GAAS3c,IAAIiG;QAC9B,OAAO2K,IAAM1T,KAAK0f,GAAUnY,QAAQmM,MAAQ;OAG9CnN;aAAAA;YACE,OAAOvG,KAAK0f,GAAUnZ;;;;;+DAIxBzG,sBAAAA,SAAQ+d;QACN7d,KAAK0f,GAAUxD,IAAiB,SAACC,GAAGxP;mBAClCkR,EAAG1B;;;2DAMPrc,kBAAAA,SAAI4T;;QAEF,IAAMJ,IAAMtT,KAAKuT,OAAOG,EAAI3K;QAC5B,OAAOuK,EAAImI,GACTnI,EAAImM,GAASjE,GAAO9H,EAAI3K,KAAK2K,IAC7BJ,EAAIoM,GAAUlE,GAAO9H,GAAK;;+CAK9B5T,qBAAAA,SAAOiJ;QACL,IAAM2K,IAAM1T,KAAK8C,IAAIiG;QACrB,OAAK2K,IAIE1T,KAAKyb,GAAKzb,KAAKyf,GAAS9D,OAAO5S,IAAM/I,KAAK0f,GAAU/D,OAAOjI,MAHzD1T;OAMXF,sBAAAA,SAAQkF;QACN,MAAMA,aAAiB4a,IACrB;QAEF,IAAI5f,KAAKuG,SAASvB,EAAMuB,MACtB;QAKF,KAFA,IAAMkY,IAASze,KAAK0f,GAAUtB,MACxBM,IAAU1Z,EAAM0a,GAAUtB,MACzBK,EAAOR,QAAW;YACvB,IAAM4B,IAAUpB,EAAOP,KAAUnV,KAC3B+W,IAAWpB,EAAQR,KAAUnV;YACnC,KAAK8W,EAAQra,QAAQsa,IACnB;;QAGJ;OAGFhgB,uBAAAA;QACE,IAAMigB,IAAuB;QAI7B,OAHA/f,KAAKmG,SAAQuN,SAAAA;YACXqM,EAAW1Z,KAAKqN,EAAIjS;aAEI,MAAtBse,EAAWpa,SACN,mBAEA,sBAAsBoa,EAAW3Y,KAAK,UAAU;OAInDtH,iBAAAA,SACN2f,GACAC;QAEA,IAAMM,IAAS,IAAIJ;QAInB,OAHAI,EAAOja,IAAa/F,KAAK+F,GACzBia,EAAOP,KAAWA,GAClBO,EAAON,KAAYA,GACZM;;;IClHXlgB;QACEE,UAAoB,IAAIub,GACtBjT,EAAYvC;;WAGdjG,oBAAAA,SAAMmgB;QACJ,IAAMlX,IAAMkX,EAAOvM,IAAI3K,KACjBmX,IAAYlgB,KAAKmgB,GAAUrd,IAAIiG;QAChCmX;;0BAOHD,EAAOG,6BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAKkX,0BAE5CA,EAAOG,4BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAK;YAC1CqX,MAAMF,EAAUE;YAChB1M,KAAKuM,EAAOvM;kCAGduM,EAAOG,6BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAK;YAC1CqX;YACA1M,KAAKuM,EAAOvM;kCAGduM,EAAOG,0BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAK;YAC1CqX;YACA1M,KAAKuM,EAAOvM;iCAGduM,EAAOG,0BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAUxE,OAAO5S,yBAEvCkX,EAAOG,6BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAK;YAC1CqX;YACA1M,KAAKwM,EAAUxM;+BAGjBuM,EAAOG,4BACPF,EAAUE,OAEVpgB,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAK;YAC1CqX;YACA1M,KAAKuM,EAAOvM;;;
;;;;;;QAUd9N,OA/DA5F,KAAKmgB,KAAYngB,KAAKmgB,GAAU3E,GAAOzS,GAAKkX;OAwEhDngB,iBAAAA;QACE,IAAMugB,IAAgC;QAMtC,OALArgB,KAAKmgB,GAAUjE,IACb,SAACnT,GAAkBkX;YACjBI,EAAQha,KAAK4Z;aAGVI;;;IAKTvgB,WACWwgB,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC;QAPA7gB,aAAAsgB,GACAtgB,YAAAugB,aACAC,GACAxgB,kBAAAygB,aACAC,GACA1gB,iBAAA2gB;kBACAC,aACAC;;;kBAIX/gB,SACEwgB,GACAQ,GACAJ,GACAC;QAEA,IAAMN,IAAgC;QAKtC,OAJAS,EAAU3a,SAAQuN,SAAAA;YAChB2M,EAAQha,KAAK;gBAAE+Z;gBAAwB1M,KAAAA;;aAGlC,IAAIqN,EACTT,GACAQ,GACAlB,GAAYoB,GAASF,IACrBT,GACAK,GACAC;;;OAMJtK;aAAAA;YACE,QAAQrW,KAAK0gB,GAAYtX;;;;QAG3BtJ,sBAAAA,SAAQkF;QACN,MACEhF,KAAK2gB,cAAc3b,EAAM2b,aACzB3gB,KAAK4gB,OAAqB5b,EAAM4b,MAC/B5gB,KAAK0gB,GAAYlb,QAAQR,EAAM0b,OAC/B1gB,KAAKsgB,MAAM9a,QAAQR,EAAMsb,UACzBtgB,KAAKugB,KAAK/a,QAAQR,EAAMub,SACxBvgB,KAAKwgB,GAAQhb,QAAQR,EAAMwb,MAE5B;QAEF,IAAMH,IAAgCrgB,KAAKygB,YACrCQ,IAAqCjc,EAAMyb;QACjD,IAAIJ,EAAQ1a,WAAWsb,EAAatb,QAClC;QAEF,KAAK,IAAIc,IAAI,GAAGA,IAAI4Z,EAAQ1a,QAAQc,KAClC,IACE4Z,EAAQ5Z,GAAG2Z,SAASa,EAAaxa,GAAG2Z,SACnCC,EAAQ5Z,GAAGiN,IAAIlO,QAAQyb,EAAaxa,GAAGiN,MAExC;QAGJ;;;ICxKF5T;;;;IAIW6a;;;;IAIAuG;;;;;IAKAC;;;;;IAKAC;;;;IAIAC;kBAlBA1G,aAIAuG,aAKAC,aAKAC,aAIAC;;;;;;;;;sBAUXvhB,SACE0a,GACAvS;QAEA,IAAMiZ,IAAgB,IAAIvM;QAQ1B,OAPAuM,EAAc5N,IACZkH,GACA8G,GAAaC,GACX/G,GACAvS,KAGG,IAAIuZ,EACTlc,EAAgByB,OAChBma,GACA1B,MACAT,MACAO;;;IAcJxf;;;;;;;IAOW+a;;;;;;IAMA5S;;;;;IAKAwZ;;;;;IAKAC;;;;;IAKAC;QArBA3hB,mBAAA6a,aAMA5S,aAKAwZ,aAKAC,aAKAC;;;;;;;kBAQX7hB,SACE0a,GACAvS;QAEA,OAAO,IAAIqZ,EACT/X,EAAWuR,IACX7S,GACAqX,MACAA,MACAA;;UC1FJxf;;AAES8hB;;AAEAC;;AAEA9Y;;;;;AAKA+Y;cATAF,GAEA5hB,wBAAA6hB,GAEA7hB,WAAA+I,aAKA+Y;QAKThiB,SACS0a,GACAuH;IADA/hB,gBAAAwa,aACAuH;QAaTjiB;;AAESkiB;;AAEAC;;;;;;;AAOApH;uDAEAqH;qBAFArH,IAA0BtR,EAAWuR,sBAErCoH,WAXAliB,aAAAgiB,GAEAhiB,iBAAAiiB;IAOAjiB,mBAAA6a,GAEA7a,aAAAkiB;;IAKXpiB;;;;;QAKEE,UAA2B;;;;;;;QAQ3BA,UAGImiB;;QAGJniB,UAAmCuJ,EAAWuR,IAC9C9a;;;;;;QAOAA;;WAUAoiB;;;;;;;;;aAAAA;YACE,OAAOpiB,KAAKqiB;;;;QAIdxH;sEAAAA;YACE,OAAO7a,KAAKsiB;;;;QAIdC;mFAAAA;YACE,OAAiC,MAA1BviB,KAAKwiB;;;;QAIdC;uFAAAA;YACE,OAAOziB,KAAK0iB;;;;;;;;;IAOd5iB,iBAAAA,SAAkB+a;QACZA,EAAY8H,OAAwB,MACtC3iB,KAAK0iB,SACL1iB,KAAKsiB,KAAezH;;;;;;;;IAUxB/a,iBAAAA;QACE,IAAI2hB,IAAiBnC,MACjBoC,IAAoBpC,MACpBqC,IAAmBrC;QAkBvB,OAhBAtf,KAAK4iB,GAAgBzc,SAAQ,SAAC4C,GAAK8Z;YACjC,QAAQA;cACN;gBACEpB,IAAiBA,EAAelD,IAAIxV;gBACpC;;cACF;gBACE2Y,IAAoBA,EAAkBnD,IAAIxV;gBAC1C;;cACF;gBACE4Y,IAAmBA,EAAiBpD,IAAIxV;gBACxC;;cACF;gBACEnD;;aAIC,IAAI0b,GACTthB,KAAKsiB,IACLtiB,KAAKqiB,IACLZ,GACAC,GACAC;;;;;IAOJ7hB,iBAAAA;QACEE,KAAK0iB,SACL1iB,KAAK4iB,KAAkBT;OAGzBriB,iBAAAA,SAAkBiJ,GAAkB8Z;QAClC7iB,KAAK0iB,SACL1iB,KAAK4iB,KAAkB5iB,KAAK4iB,GAAgBpH,GAAOzS,GAAK8Z;OAG1D/iB,iBAAAA,SAAqBiJ;QACnB/I,KAAK0iB,SACL1iB,KAAK4iB,KAAkB5iB,KAAK4iB,GAAgBjH,OAAO5S;OAGrDjJ,iBAAAA;QACEE,KAAKwiB,MAAoB;OAG3B1iB,iBAAAA;QACEE,KAAKwiB,MAAoB;OAG3B1iB,iBAAAA;QACEE,KAAK0iB,SACL1iB,KAAKqiB;;;IA4BPviB,WAAoBgjB;kBAAAA;;QAGpB9iB,UAAuB,IAAI2U;;QAG3B3U,UAAiC+e;;QAGjC/e,UAAuC+iB;;;;;;QAOvC/iB,UAA8B,IAAIwe,GAAoBvZ;;;;;WAKtDnF,iBAAAA,SAAqBkjB;QACnB,KAAuBA,WAAAA,IAAAA,EAAUpB,IAAVoB,cAAAA;YAAlB,IAAMxI;YACLwI,EAAUlB,cAAkB/P,KAC9B/R,KAAKijB,GAAoBzI,GAAUwI,EAAUlB,MACpCkB,EAAUlB,cAAkBxN,MACrCtU,KAAKkjB,GACH1I,GACAwI,EAAUja,KACVia,EAAUlB;;QAKhB,KAAuBkB,WAAAA,IAAAA,EAAUnB,kBAAVmB,cAAAA;YAAlB,IAAMxI;YACTxa,KAAKkjB,GAAyB1I,GAAUwI,EAAUja,KAAKia,EAAUlB;;;mFAKrEhiB,iBAAAA,SAAmBqjB;QAAnBrjB;QACEE,KAAKojB,GAAcD,IAAc3I,SAAAA;YAC/B,IAAM6I,IAAcrjB,EAAKsjB,GAAkB9I;YAC3C,QAAQ2I,EAAanB;cACnB;gBACMhiB,EAAKujB,GAAe/I,MACtB6I,EAAYG,GAAkBL,EAAatI;gBAE7C;;cACF;;;gBAGEwI,EAAYI,MACPJ,EAAYK;;;;gBAIfL,EAAYM,MAEdN,EAAYG,GAAkBL,EAAatI;gBAC3C;;c
ACF;;;;;gBAKEwI,EAAYI,MACPJ,EAAYK,MACf1jB,EAAK4jB,aAAapJ;gBAMpB;;cACF;gBACMxa,EAAKujB,GAAe/I,OACtB6I,EAAYQ,MACZR,EAAYG,GAAkBL,EAAatI;gBAE7C;;cACF;gBACM7a,EAAKujB,GAAe/I;;;;gBAItBxa,EAAK8jB,GAAYtJ,IACjB6I,EAAYG,GAAkBL,EAAatI;gBAE7C;;cACF;gBACEjV;;;;;;;;;IAUR9F,iBAAAA,SACEqjB,GACAxc;QAFF7G;QAIMqjB,EAAalB,UAAUtc,SAAS,IAClCwd,EAAalB,UAAU9b,QAAQQ,KAE/B3G,KAAK+jB,GAAa5d,SAAQ,SAAC6d,GAAGxJ;YACxBxa,EAAKujB,GAAe/I,MACtB7T,EAAG6T;;;;;;;;IAWX1a,iBAAAA,SAAsBmkB;QACpB,IAAMzJ,IAAWyJ,EAAYzJ,UACvB0J,IAAgBD,EAAYlC,GAAgBjZ,OAE5Cqb,IAAankB,KAAKokB,GAA0B5J;QAClD,IAAI2J,GAAY;YACd,IAAM5J,IAAS4J,EAAW5J;YAC1B,IAAIA,EAAOzB,MACT,IAAsB,MAAlBoL,GAAqB;;;;;;;gBAOvB,IAAMnb,IAAM,IAAIT,EAAYiS,EAAOjT;gBACnCtH,KAAKkjB,GACH1I,GACAzR,GACA,IAAIuL,GAAWvL,GAAKzD,EAAgByB;mBAtWpC1D,GA0WkB,MAAlB6gB,SAKgBlkB,KAAKqkB,GAAiC7J,OACtC0J;;;YAGlBlkB,KAAK8jB,GAAYtJ,IACjBxa,KAAKskB,KAAsBtkB,KAAKskB,GAAoB/F,IAAI/D;;;;;;;IAUhE1a,iBAAAA,SAAkB6a;QAAlB7a,cACQohB,IAAgB,IAAIvM;QAE1B3U,KAAK+jB,GAAa5d,SAAQ,SAACkd,GAAa7I;YACtC,IAAM2J,IAAankB,EAAKokB,GAA0B5J;YAClD,IAAI2J,GAAY;gBACd,IAAId,EAAYpb,MAAWkc,EAAW5J,OAAOzB,MAAmB;;;;;;;;;oBAU9D,IAAM/P,IAAM,IAAIT,EAAY6b,EAAW5J,OAAOjT;oBAEH,SAAzCtH,EAAKukB,GAAuBzhB,IAAIiG,MAC/B/I,EAAKwkB,GAAuBhK,GAAUzR,MAEvC/I,EAAKkjB,GACH1I,GACAzR,GACA,IAAIuL,GAAWvL,GAAK4R;;gBAKtB0I,EAAYoB,OACdvD,EAAc5N,IAAIkH,GAAU6I,EAAYqB,OACxCrB,EAAYM;;;QAKlB,IAAItC,IAAyB/B;;;;;gBAO7Btf,KAAK2kB,GAA6Bxe,SAAQ,SAAC4C,GAAK6b;YAC9C,IAAIC;YAEJD,EAAQE,IAAatK,SAAAA;gBACnB,IAAM2J,IAAankB,EAAKokB,GAA0B5J;gBAClD,QACE2J,iCACAA,EAAW1J,OAEXoK;iBAOAA,MACFxD,IAAyBA,EAAuB9C,IAAIxV;;QAIxD,IAAMgc,IAAc,IAAIvD,GACtB7G,GACAuG,GACAlhB,KAAKskB,IACLtkB,KAAKukB,IACLlD;QAOF,OAJArhB,KAAKukB,KAAyBxF,MAC9B/e,KAAK2kB,KAA+B5B,MACpC/iB,KAAKskB,KAAsB,IAAI9F,GAAoBvZ,KAE5C8f;;;;;;;IAQTjlB,iBAAAA,SAAoB0a,GAAoBwK;QACtC,IAAKhlB,KAAKujB,GAAe/I,IAAzB;YAIA,IAAMqI,IAAa7iB,KAAKwkB,GAAuBhK,GAAUwK,EAASjc;YAI9C/I,KAAKsjB,GAAkB9I,GAC/ByK,GAAkBD,EAASjc,KAAK8Z,IAE5C7iB,KAAKukB,KAAyBvkB,KAAKukB,GAAuB/I,GACxDwJ,EAASjc,KACTic,IAGFhlB,KAAK2kB,KAA+B3kB,KAAK2kB,GAA6BnJ,GACpEwJ,EAASjc,KACT/I,KAAKklB,GAA4BF,EAASjc,KAAKwV,IAAI/D;;;;;;;;;;;IAYvD1a,iBAAAA,SACE0a,GACAzR,GACAoc;QAEA,IAAKnlB,KAAKujB,GAAe/I,IAAzB;YAIA,IAAM6I,IAAcrjB,KAAKsjB,GAAkB9I;YACvCxa,KAAKwkB,GAAuBhK,GAAUzR,KACxCsa,EAAY4B,GAAkBlc;;;YAI9Bsa,EAAY+B,GAAqBrc,IAGnC/I,KAAK2kB,KAA+B3kB,KAAK2kB,GAA6BnJ,GACpEzS,GACA/I,KAAKklB,GAA4Bnc,GAAKwK,OAAOiH,KAG3C2K,MACFnlB,KAAKukB,KAAyBvkB,KAAKukB,GAAuB/I,GACxDzS,GACAoc;;OAKNrlB,2BAAAA,SAAa0a;QACXxa,KAAK+jB,GAAaxQ,OAAOiH;;;;;;;IAQnB1a,iBAAAA,SAAiC0a;QACvC,IACM2I,IADcnjB,KAAKsjB,GAAkB9I,GACVkK;QACjC,OACE1kB,KAAK8iB,GAAiBuC,GAAuB7K,GAAUjU,OACvD4c,EAAa1B,GAAelb,OAC5B4c,EAAaxB,GAAiBpb;;;;;;IAQlCzG,iBAAAA,SAA2B0a;QAELxa,KAAKsjB,GAAkB9I,GAC/B8K;OAGNxlB,iBAAAA,SAAkB0a;QACxB,IAAIjM,IAASvO,KAAK+jB,GAAajhB,IAAI0X;QAKnC,OAJKjM,MACHA,IAAS,IAAIgX,IACbvlB,KAAK+jB,GAAazQ,IAAIkH,GAAUjM,KAE3BA;OAGDzO,iBAAAA,SAA4BiJ;QAClC,IAAIyc,IAAgBxlB,KAAK2kB,GAA6B7hB,IAAIiG;QAU1D,OARKyc,MACHA,IAAgB,IAAIhH,GAAoBvZ,KACxCjF,KAAK2kB,KAA+B3kB,KAAK2kB,GAA6BnJ,GACpEzS,GACAyc,KAIGA;;;;;;;IAQC1lB,iBAAAA,SAAe0a;QACvB,IAAMiL,IAA4D,SAA7CzlB,KAAKokB,GAA0B5J;QAIpD,OAHKiL,KACHC,GAxXU,yBAwXQ,4BAA4BlL,IAEzCiL;;;;;;IAOC3lB,iBAAAA,SAA0B0a;QAClC,IAAM6I,IAAcrjB,KAAK+jB,GAAajhB,IAAI0X;QAC1C,OAAO6I,KAAeA,EAAYK,KAC9B,OACA1jB,KAAK8iB,GAAiB6C,GAAuBnL;;;;;;;IAQ3C1a,iBAAAA,SAAY0a;QAAZ1a;QAKNE,KAAK+jB,GAAazQ,IAAIkH,GAAU,IAAI+K,KAKfvlB,KAAK8iB,GAAiBuC,GAAuB7K,GACrDrU,SAAQ4C,SAAAA;YACnB/I,EAAKkjB,GAAyB1I,GAAUzR,wBAA0B;;;;;;;IAO9DjJ,iBAAAA,SACN0a,GACAzR;QAGA,OADqB/I,KAAK8iB,GAAiBuC,GAAuB7K,GAC9C8D,IAAIvV;;;;;;;;;;;;;;;;;;;;;;;GAI5B,UAASga;IACP,OAAO,IAAIxH,GACTjT,EAA
YvC;;;AAIhB,SAASoc;IACP,OAAO,IAAI5G,GAAmCjT,EAAYvC;;;;;;;;;;;;;;;;;;QClmBtD6f,KACgD;IACpDC,KAA4B;IAC5BC,MAA6B;GAIzBC,KAC6C;IACjDC,KAA0B;IAC1BC,MAAmC;IACnCC,KAA6B;IAC7BC,MAAsC;IACtCC,MAAsB;IACtBC,kBAA+B;IAC/BC,IAAmB;IACnBC,sBAAmC;;IA0BnCzmB,WACUsP,GACA8G;kBADA9G,GACApP,eAAAkW;;WAGVpW,iBAAAA,SAAc0mB;QACZ,IAAMllB,eACJklB,EAAOllB,OACHlB,EAAKG,UACL0a,GAAmBuL,EAAOllB;QAChC,OAAO,IAAI8B,EAAe9B,GAAMklB,EAAOjlB,WAAW;;;;;;;;;;IAW5CzB,iBAAAA,SAAaka;QACnB,OAAIha,KAAKkW,QAAQuQ,MAAiBxc,EAAkB+P,KAC3CA,IAEA;YAAEpY,OAAOoY;;;;;;IAOZla,iBAAAA,SACNka;QAEA,IAAIzL;QAMJ,OAAOtE,EAJLsE,IADiB,mBAARyL,IACAA,EAAIpY,QAEJoY,KAEwB,OAAOzL;;;;;IAM5CzO,iBAAAA,SAAU8B;QACR,OAAO;YAAEiK,cAAc,KAAKjK;;;;;;;IAO9B9B,iBAAAA,SAAS8B;QACP,IAAI5B,KAAKkW,QAAQuQ,IAAe;YAC9B,IAAIxa,MAAMrK,IACR,OAAO;gBAAEmK,aAAa;;YACjB,IAAInK,MAAU8kB,IAAAA,GACnB,OAAO;gBAAE3a,aAAa;;YACjB,IAAInK,cACT,OAAO;gBAAEmK,aAAa;;;QAG1B,OAAO;YAAEA,aAAa7B,EAAetI,KAAS,OAAOA;;;;;;;;IAQvD9B,iBAAAA,SAAS8B;QACP,gBnBzJ0BA;YAC5B,OACmB,mBAAVA,KACPkN,OAAOS,UAAU3N,OAChBsI,EAAetI,MAChBA,KAASkN,OAAO6X,oBAChB/kB,KAASkN,OAAO8X;UmBmJKhlB,KAAS5B,KAAK+Q,GAAUnP,KAAS5B,KAAKgR,GAASpP;;;;;IAMtE9B,gBAAAA,SAAYuF;QACV,OAAIrF,KAAKkW,QAAQuQ,KAIG,IAAIjiB,KAAyB,MAApBa,EAAUjB,SAAgByiB,cAEnB9e,QAAQ,SAAS,IAAIA,QAAQ,KAAK,aAEnD,cAAc1C,EAAUhB,aAAa4B,OAAO,WAItD;YACL7B,SAAS,KAAKiB,EAAUjB;YACxByG,OAAOxF,EAAUhB;;OAMfvE,gBAAAA,SAAc4E;QACpB,IAAMW,IAAYqF,EAAmBhG;QACrC,OAAO,IAAIJ,EAAUe,EAAUjB,SAASiB,EAAUwF;;;;;;;IAQpD/K,iBAAAA,SAAQgnB;QACN,OAAI9mB,KAAKkW,QAAQuQ,KACRK,EAAM1Y,aAEN0Y,EAAMC;;;;;IAOjBjnB,iBAAAA,SAAU8B;QACR,OAAI5B,KAAKkW,QAAQuQ,MACfpjB,cACEzB,KAAwC,mBAAVA,IAGzB2H,EAAW0F,iBAAiBrN,KAAgB,QAEnDyB,cACEzB,KAAuBA,aAAiBmI;QAGnCR,EAAW2F,eAAetN,KAAgB,IAAImI;OAIzDjK,wBAAAA,SAAU0R;QACR,OAAOxR,KAAKgnB,EAAYxV,EAAQwV;OAGlClnB,0BAAAA,SAAY0R;QAEV,OAnNanO,KAkNAmO,IACNlM,EAAgB2hB,EAAcjnB,KAAKinB,EAAczV;OAG1D1R,iBAAAA,SAAewH,GAAoB8H;QACjC,OAAOpP,KAAKknB,GAAyB9X,KAAcpP,KAAKoP,IACrDuG,MAAM,aACNA,MAAMrO,GACND;OAGLvH,iBAAAA,SAAiB4B;QACf,IAAMylB,IAAWjgB,EAAaqB,EAAW7G;QAKzC,OAnOa2B,GAgOX+jB,GAAoBD,KAGfA;OAGTrnB,iBAAAA,SAAOiJ;QACL,OAAO/I,KAAKqnB,GAAete,EAAIzB;OAGjCxH,gBAAAA,SAAS4B;QACP,IAAMylB,IAAWnnB,KAAKsnB,GAAiB5lB;QAgBvC,OA3Pa2B,GA6OX8jB,EAASrkB,IAAI,OAAO9C,KAAKoP,GAAWC,YA7OzBhM,IAoPT8jB,EAASrkB,IAAI,OAAO9C,KAAKoP,GAAWE,YACpC6X,EAASrkB,IAAI,OAAO9C,KAAKoP,GAAWE;QAMjC,IAAIhH,EAAYtI,KAAKunB,GAAiCJ;OAG/DrnB,iBAAAA,SAAYwH;QACV,OAAOtH,KAAKqnB,GAAe/f;OAG7BxH,iBAAAA,SAAc4B;QACZ,IAAM8lB,IAAexnB,KAAKsnB,GAAiB5lB;;;;;gBAK3C,OAA4B,MAAxB8lB,EAAa7hB,SACRuB,EAAakO,IAEfpV,KAAKunB,GAAiCC;OAG/CC;aAAAA;YAOE,OANa,IAAIvgB,EAAa,EAC5B,YACAlH,KAAKoP,GAAWC,WAChB,aACArP,KAAKoP,GAAWE,YAENjI;;;;QAGNvH,iBAAAA,SAAyBsP;QAC/B,OAAO,IAAIlI,EAAa,EACtB,YACAkI,EAAWC,WACX,aACAD,EAAWE;OAIPxP,iBAAAA,SACN0nB;QAMA,OAxSankB,GAqSXmkB,EAAa7hB,SAAS,KAA6B,gBAAxB6hB,EAAa1kB,IAAI,KAGvC0kB,EAAahf,EAAS;;mFAI/B1I,iBAAAA,SAAmBiJ,GAAkBsB;QACnC,OAAO;YACL3I,MAAM1B,KAAK0nB,GAAO3e;YAClBsB,QAAQA,EAAOoK,MAAMrK,SAASC;;OAIlCvK,iBAAAA,SAAWklB;QAKT,OAAO;YACLtjB,MAAM1B,KAAK0nB,GAAO1C,EAASjc;YAC3BsB,QAAQ2a,EAAS2C,KAAUvd,SAASC;YACpCqH,YAAY1R,KAAKgnB,EAAYhC,EAASxT,QAAQwV;;OAIlDlnB,iBAAAA,SACEklB,GACA5S;QAEA,IAAMrJ,IAAM/I,KAAKqO,EAAS2W,EAActjB,OAClC8P,IAAUxR,KAAK4nB,YAAY5C,EAAStT,aACpCgB,IAAO,IAAIM,GAAY;YAAE5I,UAAU;gBAAEC,QAAQ2a,EAAS3a;;;QAC5D,OAAO,IAAI0H,GAAShJ,GAAKyI,GAASkB,GAAM;YACtCN,yBAAyBA;;OAIrBtS,iBAAAA,SAAU4T;QAChBrQ,KACIqQ,EAAImU,QAGMnU,EAAImU,MAAMnmB,MACVgS,EAAImU,MAAMnW;QACxB,IAAM3I,IAAM/I,KAAKqO,EAASqF,EAAImU,MAAMnmB,OAC9B8P,IAAUxR,KAAK4nB,YAAYlU,EAAImU,MAAMnW,aACrCgB,IAAO,IAAIM,GAAY;YAAE5I,UAAU;gBAAEC,QAAQqJ,EAAImU,MAAMxd;;;QAC7D,OAAO,IAAI0H,GAAShJ,GAAKyI,GAASkB,GAAM;OAGlC5S,iBAA
AA,SAAYyO;QAClBlL,KACIkL,EAAOuZ,UAGXzkB,KACIkL,EAAOwZ;QAGX,IAAMhf,IAAM/I,KAAKqO,EAASE,EAAOuZ,UAC3BtW,IAAUxR,KAAK4nB,YAAYrZ,EAAOwZ;QACxC,OAAO,IAAIzT,GAAWvL,GAAKyI;OAG7B1R,iBAAAA,SAAkByO;QAChB,OAAI,WAAWA,IACNvO,KAAKgoB,GAAUzZ,KACb,aAAaA,IACfvO,KAAKioB,GAAY1Z,KA/Xd3I;OAoYd9F,iBAAAA,SAAgBmgB;QACd,IAAIgE;QACJ,IAAI,kBAAkBhE,GAAQ;YACdA,EAAOkD;;;YAGrB,IAAMnB,IAAQhiB,KAAKkoB,GACjBjI,EAAOkD,aAAagF,oBAAoB,cAEpClG,IAAwBhC,EAAOkD,aAAalB,aAAa,IAEzDpH,IAAc7a,KAAKooB,GAAUnI,EAAOkD,aAAatI,cACjDwN,IAAapI,EAAOkD,aAAcjB,OAClCA,IAAQmG,KAAcroB,KAAKsoB,GAAcD;YAC/CpE,IAAc,IAAIsE,GAChBvG,GACAC,GACApH,GACAqH,KAAS;eAEN,IAAI,oBAAoBjC,GAAQ;YACvBA,EAAOuI;YACrB,IAAMC,IAAexI,EAAOuI;YACdC,EAAazD,UACbyD,EAAazD,SAAStjB,MAElC+mB,EAAazD,SAAStT;YAGxB,IAAM3I,IAAM/I,KAAKqO,EAASoa,EAAazD,SAAStjB,OAC1C8P,IAAUxR,KAAK4nB,YAAYa,EAAazD,SAAStT,aACjDgB,IAAO,IAAIM,GAAY;gBAC3B5I,UAAU;oBAAEC,QAAQoe,EAAazD,SAAS3a;;gBAEtCqJ,IAAM,IAAI3B,GAAShJ,GAAKyI,GAASkB,GAAM,KACvCkP,IAAmB6G,EAAaxG,aAAa,IAC7CJ,IAAmB4G,EAAa5G,oBAAoB;YAC1DoC,IAAc,IAAIyE,GAChB9G,GACAC,GACAnO,EAAI3K,KACJ2K;eAEG,IAAI,oBAAoBuM,GAAQ;YACvBA,EAAO0I;YACrB,IAAMC,IAAY3I,EAAO0I;YACXC,EAAU5D;YACxB,IAAMjc,IAAM/I,KAAKqO,EAASua,EAAU5D,WAC9BxT,IAAUoX,EAAUb,WACtB/nB,KAAK4nB,YAAYgB,EAAUb,YAC3BziB,EAAgByB,OACd2M,IAAM,IAAIY,GAAWvL,GAAKyI,IAC1BqQ,IAAmB+G,EAAU/G,oBAAoB;YACvDoC,IAAc,IAAIyE,GAAoB,IAAI7G,GAAkBnO,EAAI3K,KAAK2K;eAChE,IAAI,oBAAoBuM,GAAQ;YACvBA,EAAO4I;YACrB,IAAMC,IAAY7I,EAAO4I;YACXC,EAAU9D;YACxB,IAAMjc,IAAM/I,KAAKqO,EAASya,EAAU9D,WAC9BnD,IAAmBiH,EAAUjH,oBAAoB;YACvDoC,IAAc,IAAIyE,GAAoB,IAAI7G,GAAkB9Y,GAAK;eAC5D;YAAA,MAAI,YAAYkX,IAUrB,OA3cUra;YAmcIqa,EAAOxY;YACrB,IAAMA,IAASwY,EAAOxY;YACRA,EAAO+S;YACrB,IAAM1R,IAAQrB,EAAOqB,SAAS,GACxBiZ,IAAkB,IAAIgH,GAAgBjgB,IACtC0R,IAAW/S,EAAO+S;YACxByJ,IAAc,IAAI+E,GAAsBxO,GAAUuH;;QAIpD,OAAOkC;OAGTnkB,iBAAAA,SACEkiB;QAEA,OAAc,gBAAVA,uBAEiB,UAAVA,oBAEU,aAAVA,sBAEU,cAAVA,sBAEU,YAAVA,oBA3dCpc;OAked9F,iBAAAA,SAA0BmgB;;;;QAIxB,MAAM,kBAAkBA,IACtB,OAAO3a,EAAgByB;QAEzB,IAAMoc,IAAelD,EAAoBkD;QACzC,OAAIA,EAAalB,aAAakB,EAAalB,UAAUtc,SAC5CL,EAAgByB,QAEpBoc,EAAa4E,WAGX/nB,KAAK4nB,YAAYzE,EAAa4E,YAF5BziB,EAAgByB;OAK3BjH,iBAAAA,SAAWmpB;QAAXnpB,IACMyO;QACJ,IAAI0a,aAAoBxW,IACtBlE,IAAS;YACP2a,QAAQlpB,KAAKmpB,GAAmBF,EAASlgB,KAAKkgB,EAASrnB;gBAEpD,IAAIqnB,aAAoB1U,IAC7BhG,IAAS;YAAEgF,QAAQvT,KAAK0nB,GAAOuB,EAASlgB;gBACnC,IAAIkgB,aAAoBlW,IAC7BxE,IAAS;YACP2a,QAAQlpB,KAAKmpB,GAAmBF,EAASlgB,KAAKkgB,EAASvW;YACvD0W,YAAYppB,KAAKqpB,GAAeJ,EAAStW;gBAEtC,IAAIsW,aAAoB9U,IAC7B5F,IAAS;YACPgD,WAAW;gBACTyT,UAAUhlB,KAAK0nB,GAAOuB,EAASlgB;gBAC/B0K,iBAAiBwV,EAASxV,gBAAgB5L,KAAI0J,SAAAA;oBAC5CvR,OAAAA,EAAKspB,GAAiB/X;;;gBAIvB;YAAA,MAAI0X,aAAoBzU,KAK7B,OA9gBU5O;YA0gBV2I,IAAS;gBACPgb,QAAQvpB,KAAK0nB,GAAOuB,EAASlgB;;;QAUjC,OAJKkgB,EAASjX,GAAawX,OACzBjb,EAAOkb,kBAAkBzpB,KAAK0pB,GAAeT,EAASjX,MAGjDzD;OAGTzO,iBAAAA,SAAa2U;QAAb3U,cACQkS,IAAeyC,EAAMgV,kBACvBzpB,KAAK2pB,GAAiBlV,EAAMgV,mBAC5B7X,GAAagY;QAEjB,IAAInV,EAAMyU,QAAQ;YACFzU,EAAMyU,OAAOxnB;YAC3B,IAAMqH,IAAM/I,KAAKqO,EAASoG,EAAMyU,OAAOxnB,OACjCE,IAAQ,IAAIoR,GAAY;gBAC5B5I,UAAU;oBAAEC,QAAQoK,EAAMyU,OAAO7e;;;YAEnC,IAAIoK,EAAM2U,YAAY;gBACpB,IAAMzW,IAAY3S,KAAK6pB,GAAiBpV,EAAM2U;gBAC9C,OAAO,IAAIrW,GAAchK,GAAKnH,GAAO+Q,GAAWX;;YAEhD,OAAO,IAAIS,GAAY1J,GAAKnH,GAAOoQ;;QAEhC,IAAIyC,EAAMlB,QAAQ;YACvB,IAAMxK,IAAM/I,KAAKqO,EAASoG,EAAMlB;YAChC,OAAO,IAAIgB,GAAexL,GAAKiJ;;QAC1B,IAAIyC,EAAMlD,WAAW;YAC1B,IAAMxI,IAAM/I,KAAKqO,EAASoG,EAAMlD,UAAmByT,WAC7CvR,IAAkBgB,EAAMlD,UAAUkC,gBAAiB5L,KAAI0J,SAAAA;gBAC3DvR,OAAAA,EAAK8pB,GAAmBvY;;YAM1B,OAJAlO,UACE2O,EAAaL,SAGR,IAAIwC,GAAkBpL,GAAK0K;;QAC7B,IAAIgB,EAAM8U,QAAQ;YACvB,IAAMxgB,IAAM/I,KAAKqO,EAASoG,EAAM8U;YAChC,OAAO,IAAI/U,GAAezL
,GAAKiJ;;QAE/B,OA1jBUpM;OA8jBN9F,iBAAAA,SAAekS;QAErB,kBAAIA,EAAaN,aACR;YACLA,YAAY1R,KAAK+pB,UAAU/X,EAAaN;uBAEjCM,EAAaL,SACf;YAAEA,QAAQK,EAAaL;YArkBpB/L;OA2kBN9F,iBAAAA,SAAiBkS;QACvB,kBAAIA,EAAaN,aACRE,GAAaF,WAAW1R,KAAK4nB,YAAY5V,EAAaN,0BACpDM,EAAaL,SACfC,GAAaD,OAAOK,EAAaL,UAEjCC,GAAagY;OAIhB9pB,iBAAAA,SACN2U,GACAuV;;QAGA,IAAIxY,IAAUiD,EAAM/C,aAChB1R,KAAK4nB,YAAYnT,EAAM/C,cACvB1R,KAAK4nB,YAAYoC;QAEjBxY,EAAQhM,QAAQF,EAAgByB;;;;;;QAMlCyK,IAAUxR,KAAK4nB,YAAYoC;QAG7B,IAAIvY,IAAuC;QAI3C,OAHIgD,EAAMhD,oBAAoBgD,EAAMhD,iBAAiB9L,SAAS,MAC5D8L,IAAmBgD,EAAMhD;QAEpB,IAAIwY,GAAezY,GAASC;OAGrC3R,iBAAAA,SACEoqB,GACAF;QAFFlqB;QAIE,OAAIoqB,KAAUA,EAAOvkB,SAAS,KA7lBjBtC,cA+lBT2mB,IAGKE,EAAOriB,KAAI4M,SAAAA;YAASzU,OAAAA,EAAKmqB,GAAgB1V,GAAOuV;eAEhD;OAIHlqB,iBAAAA,SAAiBkU;QACvB,IAAMzC,IAAYyC,EAAezC;QACjC,IAAIA,aAAqBxB,IACvB,OAAO;YACLmB,WAAW8C,EAAe1C,MAAMjK;YAChC+iB,kBAAkB;;QAEf,IAAI7Y,aAAqBjB,IAC9B,OAAO;YACLY,WAAW8C,EAAe1C,MAAMjK;YAChCgjB,uBAAuB;gBACrBje,QAAQmF,EAAUvB;;;QAGjB,IAAIuB,aAAqBf,IAC9B,OAAO;YACLU,WAAW8C,EAAe1C,MAAMjK;YAChCijB,oBAAoB;gBAClBle,QAAQmF,EAAUvB;;;QAGjB,IAAIuB,aAAqBN,IAC9B,OAAO;YACLC,WAAW8C,EAAe1C,MAAMjK;YAChCkjB,WAAWhZ,EAAUb;;QAGvB,MAxpBU9K;OA4pBN9F,iBAAAA,SAAmB2U;QACzB,IAAIlD,IAAuC;QAC3C,IAAI,sBAAsBkD,GACxBpR,GAC6B,mBAA3BoR,EAAM2V,mBAGR7Y,IAAYxB,GAAyBya,eAChC,IAAI,2BAA2B/V,GAAO;YAC3C,IAAMrI,IAASqI,EAAM4V,sBAAuBje,UAAU;YACtDmF,IAAY,IAAIjB,GAA6BlE;eACxC,IAAI,wBAAwBqI,GAAO;YACxC,IAAMrI,IAASqI,EAAM6V,mBAAoBle,UAAU;YACnDmF,IAAY,IAAIf,GAA8BpE;eACrC,eAAeqI,IACxBlD,IAAY,IAAIN,GACdjR,MACAyU,EAAgB8V,aAGlB3kB;QAEF,IAAMsL,IAAYvJ,EAAU8iB,EAAiBhW,EAAgBvD;QAC7D,OAAO,IAAIwZ,GAAexZ,GAAWK;OAGvCzR,iBAAAA,SAAkBya;QAChB,OAAO;YAAEuG,WAAW,EAAC9gB,KAAK2qB,GAAYpQ,EAAOjT;;OAG/CxH,iBAAAA,SAAoB8qB;QArqBLvnB,GAwqBD,MAFEunB,EAAgB9J,UAAWnb;QAKzC,IAAMjE,IAAOkpB,EAAgB9J,UAAW;QACxC,OAAO7J,GAAM4T,GAAO7qB,KAAK8qB,GAAcppB,IAAOuW;OAGhDnY,iBAAAA,SAAcya;;QAEZ,IAAMhM,IAA0B;YAAEwc,iBAAiB;WAC7CzjB,IAAOiT,EAAOjT;QACW,SAA3BiT,EAAOjE,mBAKT/H,EAAOyc,SAAShrB,KAAK2qB,GAAYrjB,IACjCiH,EAAOwc,gBAAiBE,OAAO,EAC7B;YACExiB,cAAc8R,EAAOjE;YACrB4U;gBAQJ3c,EAAOyc,SAAShrB,KAAK2qB,GAAYrjB,EAAK6jB,MACtC5c,EAAOwc,gBAAiBE,OAAO,EAAC;YAAExiB,cAAcnB,EAAKoN;;QAGvD,IAAM0W,IAAQprB,KAAKqrB,GAAS9Q,EAAO/D;QAC/B4U,MACF7c,EAAOwc,gBAAiBK,QAAQA;QAGlC,IAAM7U,IAAUvW,KAAKsrB,GAAQ/Q,EAAOhE;QAChCA,MACFhI,EAAOwc,gBAAiBxU,UAAUA;QAGpC,IAAMrQ,IAAQlG,KAAKurB,GAAahR,EAAOrU;QAYvC,OAXc,SAAVA,MACFqI,EAAOwc,gBAAiB7kB,QAAQA,IAG9BqU,EAAO9D,YACTlI,EAAOwc,gBAAiBtU,UAAUzW,KAAKwrB,GAASjR,EAAO9D;QAErD8D,EAAO7D,UACTnI,EAAOwc,gBAAiBrU,QAAQ1W,KAAKwrB,GAASjR,EAAO7D,SAGhDnI;OAGTzO,iBAAAA,SAAgBya;QACd,IAAIjT,IAAOtH,KAAK8qB,GAAcvQ,EAAcyQ,SAEtC1K,IAAQ/F,EAAOwQ,iBACfU,IAAYnL,EAAM2K,OAAO3K,EAAM2K,KAAKtlB,SAAS,GAC/C2Q,IAAiC;QACrC,IAAImV,IAAY,GAAG;YAvuBNpoB,GAyuBK,MAAdooB;YAGF,IAAMR,IAAO3K,EAAM2K,KAAM;YACrBA,EAAKC,iBACP5U,IAAkB2U,EAAKxiB,eAEvBnB,IAAOA,EAAKqO,MAAMsV,EAAKxiB;;QAI3B,IAAIijB,IAAqB;QACrBpL,EAAM8K,UACRM,IAAW1rB,KAAK2rB,GAAWrL,EAAM8K;QAGnC,IAAI7U,IAAqB;QACrB+J,EAAM/J,YACRA,IAAUvW,KAAK4rB,GAAUtL,EAAM/J;QAGjC,IAAIrQ,IAAuB;QACvBoa,EAAMpa,UACRA,IAAQlG,KAAK6rB,GAAevL,EAAMpa;QAGpC,IAAIuQ,IAAwB;QACxB6J,EAAM7J,YACRA,IAAUzW,KAAK8rB,GAAWxL,EAAM7J;QAGlC,IAAIC,IAAsB;QAK1B,OAJI4J,EAAM5J,UACRA,IAAQ1W,KAAK8rB,GAAWxL,EAAM5J,SAGzB,IAAIO,GACT3P,GACAgP,GACAC,GACAmV,GACAxlB,qBAEAuQ,GACAC,GACAuB;OAGJnY,iBAAAA,SACEqkB;QAEA,IAAMviB,IAAQ5B,KAAK+rB,GAAQ5H,EAAW1J;QACtC,OAAa,QAAT7Y,IACK,OAEA;YACLoqB,oBAAoBpqB;;OAKlB9B,iBAAAA,SAAQ2a;QACd,QAAQA;UACN;YACE,OAAO;;UACT;YACE,OAAO;;UACT;YACE,OAAO;;UACT;YACE,OAp0BQ7U;;OAw0Bd9F,iBAAAA,SAASqkB;QACP,IAAI5V,GACEgM,IAAS4J,EAAW5J;QAc1B,QAXEhM,IADEgM,EAAOzB,OACA;YAAEgI,WAAW9gB,KAAKis
B,GAAkB1R;YAEpC;YAAE+F,OAAOtgB,KAAKksB,GAAc3R;WAGhCC,WAAW2J,EAAW3J,UAEzB2J,EAAWtJ,YAAY8H,OAAwB,MACjDpU,EAAOsM,cAAc7a,KAAKmsB,GAAQhI,EAAWtJ;QAGxCtM;OAGDzO,iBAAAA,SAAS0W;QAAT1W;QACN,IAAuB,MAAnB0W,EAAQ7Q,QAAZ;YAGA,IAAMukB,IAAS1T,EAAQ3O,KAAIJ,SAAAA;gBACrBA,OAAAA,aAAkBiR,KACb1Y,EAAKosB,GAAqB3kB,KAj2BzB7B;;YAs2BZ,OAAsB,MAAlBskB,EAAOvkB,SACFukB,EAAO,KAET;gBAAEmC,iBAAiB;oBAAExT,IAAI;oBAAOrC,SAAS0T;;;;OAG1CpqB,iBAAAA,SAAW2H;QAAX3H;QACN,OAAK2H,eAEMA,EAAO6kB,cACT,EAACtsB,KAAKusB,GAAgB9kB,kBACpBA,EAAO+kB,cACT,EAACxsB,KAAKysB,GAAgBhlB,kBACpBA,EAAO4kB,kBACT5kB,EAAO4kB,gBACX7V,QAAS3O,KAAI+O,SAAAA;YAAK5W,OAAAA,EAAK2rB,GAAW/U;YAClC8V,QAAO,SAACC,GAAO1kB;YAAY0kB,OAAAA,EAAM7U,OAAO7P;cAt3BjCrC,OA82BH;OAcH9F,iBAAAA,SAAQmZ;QAARnZ;QACN,IAAwB,MAApBmZ,EAAStT,QAGb,OAAOsT,EAASpR,KAAI+kB,SAAAA;YAAS5sB,OAAAA,EAAK6sB,GAAgBD;;OAG5C9sB,iBAAAA,SAAUmZ;QAAVnZ;QACN,OAAOmZ,EAASpR,KAAI+kB,SAAAA;YAAS5sB,OAAAA,EAAK8sB,GAAkBF;;OAG9C9sB,iBAAAA,SAASitB;QACf,OAAO;YACL3T,QAAQ2T,EAAO3T;YACfhN,QAAQ2gB,EAAO5T;;OAIXrZ,iBAAAA,SAAWitB;QACjB,IAAM3T,MAAW2T,EAAO3T,QAClBD,IAAW4T,EAAO3gB,UAAU;QAClC,OAAO,IAAI8M,GAAMC,GAAUC;;;IAI7BtZ,iBAAAA,SAAY8X;QACV,OAAOgO,GAAWhO;;;IAIpB9X,iBAAAA,SAAc8X;QACZ,QAAQA;UACN,KAAK;YACH;;UACF,KAAK;YACH;;UACF;YACE;;;;IAKN9X,iBAAAA,SAAe+Y;QACb,OAAOkN,GAAUlN;OAGnB/Y,iBAAAA,SAAiB+Y;QACf,QAAQA;UACN,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;YACH;;UACF,KAAK;UAEL;YACE,OA/7BQjT;;OAm8Bd9F,iBAAAA,SAAqBwH;QACnB,OAAO;YAAE4J,WAAW5J,EAAKD;;OAG3BvH,iBAAAA,SAAuBktB;QACrB,OAAOrlB,EAAU8iB,EAAiBuC,EAAyB9b;;;IAI7DpR,iBAAAA,SAAgByW;QACd,OAAO;YACLjF,OAAOtR,KAAKitB,GAAqB1W,EAAQjF;YACzC4b,WAAWltB,KAAKmtB,GAAY5W,EAAQqB;;OAIxC9X,iBAAAA,SAAkByW;QAChB,OAAO,IAAIiB,GACTxX,KAAKotB,GAAuB7W,EAAcjF,QAC1CtR,KAAKqtB,GAAc9W,EAAQ2W;OAI/BptB,iBAAAA,SAAgB2H;QACd,OAAOiR,GAAY4U,OACjBttB,KAAKotB,GAAuB3lB,EAAO+kB,YAAmBlb,QACtDtR,KAAKutB,GAAiB9lB,EAAO+kB,YAAgB3T,KAC7CpR,EAAO+kB,YAAmB5qB;;;IAK9B9B,iBAAAA,SAAqB2H;QACnB,yBAAIA,EAAOoR,IAAuB;YAChC,IAAInJ,GAAWjI,EAAO7F,QACpB,OAAO;gBACL0qB,aAAa;oBACXhb,OAAOtR,KAAKitB,GAAqBxlB,EAAO6J;oBACxCuH,IAAI;;;YAGH,IAAIpJ,GAAYhI,EAAO7F,QAC5B,OAAO;gBACL0qB,aAAa;oBACXhb,OAAOtR,KAAKitB,GAAqBxlB,EAAO6J;oBACxCuH,IAAI;;;;QAKZ,OAAO;YACL2T,aAAa;gBACXlb,OAAOtR,KAAKitB,GAAqBxlB,EAAO6J;gBACxCuH,IAAI7Y,KAAKwtB,GAAe/lB,EAAOoR;gBAC/BjX,OAAO6F,EAAO7F;;;OAKpB9B,iBAAAA,SAAgB2H;QACd,QAAQA,EAAO6kB,YAAgBzT;UAC7B,KAAK;YACH,IAAM4U,IAAWztB,KAAKotB,GACpB3lB,EAAO6kB,YAAmBhb;YAE5B,OAAOoH,GAAY4U,OAAOG,sBAA0B;gBAClD1hB,aAAa2hB;;;UAEjB,KAAK;YACH,IAAMC,IAAY3tB,KAAKotB,GACrB3lB,EAAO6kB,YAAmBhb;YAE5B,OAAOoH,GAAY4U,OAAOK,sBAA2B;gBACnDC,WAAW;;;UAEf,KAAK;UAEL;YACE,OAjhCQhoB;;OAqhCd9F,iBAAAA,SAAe6S;QACb,IAAMkb,IAA4B;QAIlC,OAHAlb,EAAUtI,OAAOlE,SAAQmL,SAAAA;YACvBuc,OAAAA,EAAgBxnB,KAAKiL,EAAMjK;aAEtB;YACLymB,YAAYD;;OAIhB/tB,iBAAAA,SAAiB2U;QACf,IAAMsZ,IAAQtZ,EAAMqZ,cAAc;QAClC,OAAO,IAAI/X,GAAUgY,EAAMlmB,KAAIP,SAAAA;YAAQK,OAAAA,EAAU8iB,EAAiBnjB;;;;;;;;;;aAItD8f,GAAoB9f;;IAElC,OACEA,EAAK3B,UAAU,KACC,eAAhB2B,EAAKxE,IAAI,MACO,gBAAhBwE,EAAKxE,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;kBCpgCXhD,SAAmBkuB;QACbxkB,EAAgBwkB,YAClBpoB,MAEF4D,EAAgBwkB,WAAWA;cAG7BluB;QAIE,OAHK0J,EAAgBwkB,YACnBpoB,MAEK4D,EAAgBwkB;;KCzDrBC,KAAY,IAAIC,EAAO;;;;;;;;;;;;;;;;;;;SAGbC;IACd,OAAOF,GAAUG;;;SAGHC,GAAYC;IAC1BL,GAAUG,WAAWE;;;SAGP5I,GAAS6I;;IACvB,IAAIN,GAAUG,YAAYI,EAASC,OAAO;QACxC,IAAMC,IAAO7lB,EAAIhB,IAAI8mB;QACrBV,GAAUW,YAAVX,QAAgB,gBAAcruB,YAAiB2uB,KAAUG;;;;SAI7CxT,GAASqT;;IACvB,IAAIN,GAAUG,YAAYI,EAASK,OAAO;QACxC,IAAMH,IAAO7lB,EAAIhB,IAAI8mB;QACrBV,GAAUa,YAAVb,QAAgB,gBAAcruB,YAAiB2uB,KAAUG;;;;;;aAOpDC,GAAY9lB;IACnB,IAAmB,mBAARA,GACT,OAAOA
;IAEP,IAAMmlB,IAAWxkB,GAAgBC;IACjC;QACE,OAAOukB,EAASe,GAAWlmB;MAC3B,OAAOmmB;;QAEP,OAAOnmB;;;;;;;;;;;;;;;;;;;;;;;;;;;aChCGjD,GAAKqpB;qBAAAA;;;QAGnB,IAAM1tB,IACJ,gBAAc3B,sCAA6CqvB;;;;QAM7D,MALA/T,GAAS3Z,IAKH,IAAII,MAAMJ;;;;;;;;;SASF8B,GACd6rB,GACA3tB;IAEK2tB,KACHtpB;;;;;;aAyBYupB,GACdtmB;;AAEAumB;IAMA,OAAOvmB;;;;;;;;;;;;;;;;;;;;kBC5DP/I;QAaE;;QAXA,IAAMuvB,IACJ,kEAEIC,IAAczqB,KAAKC,MAAM,MAAMuqB,EAAM1pB,UAAU0pB,EAAM1pB,QAMvD4pB,IAAS;;UAENA,EAAO5pB,SADO,MAGnB,KADA,IAAMmhB,IAAQtd,GAAgBC,KAAc+lB,GAAY,KAC/C/oB,IAAI,GAAGA,IAAIqgB,EAAMnhB,UAAUc;;;QAG9B8oB,EAAO5pB,SANM,MAMmBmhB,EAAMrgB,KAAK6oB,MAC7CC,KAAUF,EAAMI,OAAO3I,EAAMrgB,KAAK4oB,EAAM1pB;QAM9C,OAAO4pB;;;;SAIKtqB,GAAuB+B,GAASC;IAC9C,OAAID,IAAOC,KACD,IAEND,IAAOC,IACF,IAEF;;;0DAQOiF,GACdlF,GACAC,GACAlB;IAEA,OAAIiB,EAAKrB,WAAWsB,EAAMtB,UAGnBqB,EAAK0oB,OAAM,SAAC9tB,GAAO4E;QAAUT,OAAAA,EAAWnE,GAAOqF,EAAMT;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9C5D1G,SACWsP,GACAugB,GACAC,GACAC,GACAC;cAJA1gB,GACApP,sBAAA2vB,GACA3vB,YAAA4vB,GACA5vB,WAAA6vB,GACA7vB,wBAAA8vB;;IAUXhwB,WAAqBuP,GAAmBC;QAAnBtP,iBAAAqP,GACnBrP,KAAKsP,WAAWA,KANU;;WAS5BygB;aAAAA;YACE,OAV0B,gBAUnB/vB,KAAKsP;;;;QAGdxP,sBAAAA,SAAQkF;QACN,OACEA,aAAiBgrB,KACjBhrB,EAAMqK,cAAcrP,KAAKqP,aACzBrK,EAAMsK,aAAatP,KAAKsP;OAI5BxP,gBAAAA,SAAUkF;QACR,OACEC,GAAoBjF,KAAKqP,WAAWrK,EAAMqK,cAC1CpK,GAAoBjF,KAAKsP,UAAUtK,EAAMsK;;;IC3B7CxP,WAAoBmwB;kBAAAA;;;;;;;QAJpBjwB,UAEI;;;WAKJF,kBAAAA,SAAIiJ;QACF,IAAMmnB,IAAKlwB,KAAKiwB,GAASlnB,IACnByQ,IAAUxZ,KAAKmwB,GAAMD;QAC3B,eAAI1W,GAGJ,KAAgCA,WAAAA,OAAAA,cAAAA;0BAApB4W,UAAUxuB;YACpB,IAAIwuB,EAAS5qB,QAAQuD,IACnB,OAAOnH;;OAMb9B,kBAAAA,SAAIiJ;QACF,kBAAO/I,KAAK8C,IAAIiG;;8CAIlBjJ,kBAAAA,SAAIiJ,GAAcnH;QAChB,IAAMsuB,IAAKlwB,KAAKiwB,GAASlnB,IACnByQ,IAAUxZ,KAAKmwB,GAAMD;QAC3B,eAAI1W,GAAJ;YAIA,KAAK,IAAI/S,IAAI,GAAGA,IAAI+S,EAAQ7T,QAAQc,KAClC,IAAI+S,EAAQ/S,GAAG,GAAGjB,QAAQuD,IAExB,aADAyQ,EAAQ/S,KAAK,EAACsC,GAAKnH;YAIvB4X,EAAQnT,KAAK,EAAC0C,GAAKnH;eATjB5B,KAAKmwB,GAAMD,KAAM,EAAC,EAACnnB,GAAKnH;;;;;IAe5B9B,qBAAAA,SAAOiJ;QACL,IAAMmnB,IAAKlwB,KAAKiwB,GAASlnB,IACnByQ,IAAUxZ,KAAKmwB,GAAMD;QAC3B,eAAI1W,GACF;QAEF,KAAK,IAAI/S,IAAI,GAAGA,IAAI+S,EAAQ7T,QAAQc,KAClC,IAAI+S,EAAQ/S,GAAG,GAAGjB,QAAQuD,IAMxB,OALuB,MAAnByQ,EAAQ7T,gBACH3F,KAAKmwB,GAAMD,KAElB1W,EAAQ6W,OAAO5pB,GAAG;;QAKxB;OAGF3G,sBAAAA,SAAQ6G;QACNR,EAAQnG,KAAKmwB,KAAO,SAACnM,GAAG/O;YACtB,KAAqBA,WAAAA,OAAAA,cAAAA;8BAATkH,UAAGxP;gBACbhG,EAAGwV,GAAGxP;;;OAKZ7M,gBAAAA;QACE,OAAOsJ,EAAQpJ,KAAKmwB;;;;;;;;;;;;;;ICzDtBrwB,WACSwwB,GACA7lB,GACA8lB,GACAC;QAHAxwB,eAAAswB,aACA7lB,GACAzK,qBAAAuwB,GACAvwB,iBAAAwwB;;;;;;;;;;;WAcT1wB,iBAAAA,SACE2wB,GACA3e,GACA4e;QAkBA,KARA,IAAMC,IAAkBD,EAAYC,IAQ3BlqB,IAAI,GAAGA,IAAIzG,KAAKwwB,UAAU7qB,QAAQc,KAAK;YAC9C,IAAMwiB,IAAWjpB,KAAKwwB,UAAU/pB;YAChC,IAAIwiB,EAASlgB,IAAIvD,QAAQirB,IAAS;gBAChC,IAAMve,IAAiBye,EAAgBlqB;gBACvCqL,IAAWmX,EAAS7U,GAAsBtC,GAAUI;;;QAGxD,OAAOJ;;;;;;;;;IAUThS,iBAAAA,SACE2wB,GACA3e;;;QAYA,KAAuB9R,WAAAA,IAAAA,KAAKuwB,eAALvwB,cAAAA;YAAlB,IAAMipB;YACLA,EAASlgB,IAAIvD,QAAQirB,OACvB3e,IAAWmX,EAAS5U,GAClBvC,GACAA,GACA9R,KAAKyK;;;QAQX,KAHA,IAAM4H,IAAUP,UAGO9R,IAAAA,KAAKwwB,WAALxwB,cAAAA;YAAlB,IAAMipB;YACLA,EAASlgB,IAAIvD,QAAQirB,OACvB3e,IAAWmX,EAAS5U,GAClBvC,GACAO,GACArS,KAAKyK;;QAIX,OAAOqH;;;;;;IAOThS,iBAAAA,SAAwB8wB;QAAxB9wB,cAIM+wB,IAAmBD;;;;gBAUvB,OATA5wB,KAAKwwB,UAAUrqB,SAAQ2qB,SAAAA;YACrB,IAAMC,IAAkB/wB,EAAKqU,GAC3Byc,EAAE/nB,KACF6nB,EAAU9tB,IAAIguB,EAAE/nB;YAEdgoB,MACFF,IAAmBA,EAAiBrV,GAAOsV,EAAE/nB,KAAKgoB;aAG/CF;OAGT/wB,mBAAAA;QACE,OAAOE,KAAKwwB,UAAU9D,QACpB,SAAC7e,GAAMijB;YAAMjjB,OAAAA,EAAK0Q,IAAIuS,EAAE/nB;YACxBuW;OAIJxf,sBAAAA,SAAQkF;QACN,OACEhF,KAAKswB,YAAYtrB,EAAMsrB,WACvBpkB,GAAYlM,KAAKwwB,WAAWxrB,EA
AMwrB,YAAW,SAACpf,GAAGC;YAAMD,OAAAA,EAAE5L,QAAQ6L;eACjEnF,GAAYlM,KAAKuwB,eAAevrB,EAAMurB,gBAAe,SAACnf,GAAGC;YACvDD,OAAAA,EAAE5L,QAAQ6L;;;;IAQhBvR,WACWkxB,GACAC,GACAN,GACAO;;;;;IAKAC;QARAnxB,aAAAgxB,aACAC,aACAN,GACA3wB,mBAAAkxB,aAKAC;;;;;;;oBAQXrxB,SACEkxB,GACAC,GACAG,GACAF;QAnKiC7tB,GAsK/B2tB,EAAMR,UAAU7qB,WAAWyrB,EAAQzrB;QASrC,KAFA,IAAI0rB,IAAajS,MACXoR,IAAYQ,EAAMR,WACf/pB,IAAI,GAAGA,IAAI+pB,EAAU7qB,QAAQc,KACpC4qB,IAAaA,EAAW7V,GAAOgV,EAAU/pB,GAAGsC,KAAKqoB,EAAQ3qB,GAAG+K;QAG9D,OAAO,IAAI8f,EACTN,GACAC,GACAG,GACAF,GACAG;;;IC7KJvxB,WAAYyxB;QAAZzxB;;;gBAZAE,UAAqD,MACrDA,UAAkD;;QAG1CA,sBACAA,qBACRA;;;QAIAA,cAGEuxB,GACE3vB,SAAAA;YACE5B,EAAKwxB,SACLxxB,EAAKuO,SAAS3M,GACV5B,EAAKyxB;;;YAGPzxB,EAAKyxB;aAGT3C,SAAAA;YACE9uB,EAAKwxB,SACLxxB,EAAK8uB,QAAQA,GACT9uB,EAAK0xB,MACP1xB,EAAK0xB,GAAc5C;;;WAM3BhvB,oBAAAA,SACE6G;QAEA,OAAO3G,KAAKqI,aAAgB1B;OAG9B7G,mBAAAA,SACE6xB,GACAC;QAFF9xB;QAQE,OAJIE,KAAK6xB,MACPjsB,MAEF5F,KAAK6xB,SACD7xB,KAAKwxB,KACFxxB,KAAK8uB,QAGD9uB,KAAK8xB,GAAYF,GAAS5xB,KAAK8uB,SAF/B9uB,KAAK+xB,GAAYJ,GAAQ3xB,KAAYuO,UAKvC,IAAIyjB,GAAsB,SAAC/vB,GAASgwB;YACzCjyB,EAAKyxB,KAAgB7vB,SAAAA;gBACnB5B,EAAK+xB,GAAYJ,GAAQ/vB,GAAOyG,KAAKpG,GAASgwB;eAEhDjyB,EAAK0xB,KAAiB5C,SAAAA;gBACpB9uB,EAAK8xB,GAAYF,GAAS9C,GAAOzmB,KAAKpG,GAASgwB;;;OAMvDnyB,iBAAAA;QAAAA;QACE,OAAO,IAAIkC,SAAQ,SAACC,GAASgwB;YAC3BjyB,EAAKqI,KAAKpG,GAASgwB;;OAIfnyB,iBAAAA,SACN6G;QAEA;YACE,IAAM4H,IAAS5H;YACf,OAAI4H,aAAkByjB,IACbzjB,IAEAyjB,EAAmB/vB,QAAQsM;UAEpC,OAAOygB;YACP,OAAOgD,EAAmBC,OAAUjD;;OAIhClvB,iBAAAA,SACN6xB,GACA/vB;QAEA,OAAI+vB,IACK3xB,KAAKkyB,IAAiB;YAAMP,OAAAA,EAAO/vB;cAGnCowB,EAAmB/vB,QAAYL;OAIlC9B,iBAAAA,SACN8xB,GACA9C;QAEA,OAAI8C,IACK5xB,KAAKkyB,IAAiB;YAAMN,OAAAA,EAAQ9C;cAEpCkD,EAAmBC,OAAUnD;mBAMxChvB,SAAkByO;QAChB,OAAO,IAAIyjB,GAA6B,SAAC/vB,GAASgwB;YAChDhwB,EAAQsM;;kBAIZzO,SAAiBgvB;QACf,OAAO,IAAIkD,GAAsB,SAAC/vB,GAASgwB;YACzCA,EAAOnD;;cAIXhvB;;;IAGEqyB;QAEA,OAAO,IAAIH,GAAyB,SAAC/vB,GAASgwB;YAC5C,IAAI/N,IAAgB,GAChBkO,IAAgB,GAChBC;YAEJF,EAAIhsB,SAAQkK,SAAAA;kBACR6T,GACF7T,EAAQhI,MACN;sBACI+pB,GACEC,KAAQD,MAAkBlO,KAC5BjiB;qBAGJqwB,SAAAA;oBAAOL,OAAAA,EAAOK;;iBAIlBD,QACID,MAAkBlO,KACpBjiB;;;;;;;;;WAWNnC,SACEyyB;QAKA,KAHA,IAAItY,IAAiC+X,EAAmB/vB,0BAG7CuwB;YACTvY,IAAIA,EAAE5R,MAAKoqB,SAAAA;gBACLA,OAAAA,IACKT,EAAmB/vB,QAAiBwwB,KAEpCD;;kBALWD,OAAAA,cAAAA;;;QASxB,OAAOtY;mBAkBTna,SACE4yB,GACA9b;QAFF9W,cAIQ6yB,IAA4C;QAIlD,OAHAD,EAAWvsB,SAAQ,SAACkL,GAAGuhB;YACrBD,EAAStsB,KAAKuQ,EAAEzN,KAAKnJ,GAAMqR,GAAGuhB;aAEzB5yB,KAAK6yB,GAAQF;;;ICnMtB7yB,WACWgzB,GACAC,GACAC;kBAFAF,aACAC,aACAC;;;;;;;;WASXlzB,iBAAAA,SACEmzB,GACAlqB;QAFFjJ;QAIE,OAAOE,KAAK+yB,GACTG,GAA0CD,GAAalqB,GACvDV,MAAK8qB,SAAAA;YAAWnzB,OAAAA,EAAKozB,GAAoBH,GAAalqB,GAAKoqB;;;0EAIxDrzB,iBAAAA,SACNmzB,GACAlqB,GACAsqB;QAEA,OAAOrzB,KAAK8yB,GAAoBQ,GAASL,GAAalqB,GAAKV,MAAKqL,SAAAA;YAC9D,KAAoB2f,WAAAA,OAAAA,cAAAA;gBAClB3f,SAAYW,GAAiBtL,GAAK2K;;YAEpC,OAAOA;;;;;IAMH5T,iBAAAA,SACNmzB,GACA1S,GACA4S;QAEA,IAAI/B,IAAUpS;QAOd,OANAuB,EAAKpa,SAAQ,SAAC4C,GAAKwqB;YACjB,KAAoBJ,WAAAA,OAAAA,cAAAA;gBAClBI,SAAkBlf,GAAiBtL,GAAKwqB;;YAE1CnC,IAAUA,EAAQ5V,GAAOzS,GAAKwqB;aAEzBnC;;;;;;;;IASTtxB,iBAAAA,SACEmzB,GACAplB;QAFF/N;QAIE,OAAOE,KAAK8yB,GACTU,WAAWP,GAAaplB,GACxBxF,MAAKkY,SAAAA;YAAQvgB,OAAAA,EAAKyzB,GAAwBR,GAAa1S;;;;;;;IAO5DzgB,iBAAAA,SACEmzB,GACAS;QAFF5zB;QAIE,OAAOE,KAAK+yB,GACTY,GAA2CV,GAAaS,GACxDrrB,MAAK8qB,SAAAA;YACJ,IAAM5S,IAAOvgB,EAAK4zB,GAChBX,GACAS,GACAP,IAEE/B,IAAUrS;YASd,OARAwB,EAAKpa,SAAQ,SAAC4C,GAAK+I;;gBAEZA,MACHA,IAAW,IAAIwC,GAAWvL,GAAKzD,EAAgByB,SAEjDqqB,IAAUA,EAAQ5V,GAAOzS,GAAK+I;iBAGzBsf;;;;;;;;;;;IAYbtxB,iBAAAA,SACEmzB,GACA3S,GACAuT;QAEA,OAAIvT,EAAMxH,OACD9Y,KAAK8zB,GAAkCb,GAAa3S,EAAMhZ,QAC
xDgZ,EAAMyT,OACR/zB,KAAKg0B,GACVf,GACA3S,GACAuT,KAGK7zB,KAAKi0B,GACVhB,GACA3S,GACAuT;OAKE/zB,iBAAAA,SACNmzB,GACA5Z;;QAGA,OAAOrZ,KAAKk0B,GAAYjB,GAAa,IAAI3qB,EAAY+Q,IAAUhR,MAC7DyJ,SAAAA;YACE,IAAIvD,IAAS2Q;YAIb,OAHIpN,aAAoBC,OACtBxD,IAASA,EAAOiN,GAAO1J,EAAS/I,KAAK+I,KAEhCvD;;OAKLzO,iBAAAA,SACNmzB,GACA3S,GACAuT;QAHM/zB,cASA2I,IAAe6X,EAAMhK,iBACvB8a,IAAUlS;QACd,OAAOlf,KAAKgzB,GACTmB,GAAqBlB,GAAaxqB,GAClCJ,MAAK+rB,SAAAA;YAGGpC,OAAAA,GAAmB7rB,QAAQiuB,IAAUpJ,SAAAA;gBAC1C,IAAMqJ,IAAkB/T,EAAMgU,GAC5BtJ,EAAOrV,MAAMlN;gBAEf,OAAOzI,EAAKi0B,GACVhB,GACAoB,GACAR,GACAxrB,MAAKgJ,SAAAA;oBACLA,EAAElL,SAAQ,SAAC4C,GAAK2K;wBACd0d,IAAUA,EAAQ5V,GAAOzS,GAAK2K;;;gBAGjCrL,MAAK;gBAAM+oB,OAAAA;;;OAIZtxB,iBAAAA,SACNmzB,GACA3S,GACAuT;QAHM/zB,IAMFsxB,GACAmD;;gBACJ,OAAOv0B,KAAK8yB,GACT0B,GAA0BvB,GAAa3S,GAAOuT,GAC9CxrB,MAAKosB,SAAAA;mBACJrD,IAAUqD,GACHz0B,EAAK+yB,GAAc2B,GACxBzB,GACA3S;YAGHjY,MAAKssB,SAAAA;mBACJJ,IAAkBI,GAOX30B,EAAK40B,GACV3B,GACAsB,GACAnD,GACA/oB,MAAKwsB,SAAAA;gBACLzD,IAAUyD;gBAEV,KAAoBN,WAAAA,OAAAA,cAAAA,KAClB,KADG,IAAMvD,iBACcA,IAAAA,EAAMR,WAANQ,cAAAA,KAAiB;oBAAnC,IAAM/H,UACHlgB,IAAMkgB,EAASlgB,KACfsJ,IAAU+e,EAAQtuB,IAAIiG,IACtB+rB,IAAa7L,EAAS5U,GAC1BhC,GACAA,GACA2e,EAAMvmB;oBAGN2mB,IADE0D,aAAsB/iB,KACdqf,EAAQ5V,GAAOzS,GAAK+rB,KAEpB1D,EAAQzV,OAAO5S;;;YAMlCV,MAAK;;;mBAGJ+oB,EAAQjrB,SAAQ,SAAC4C,GAAK2K;gBACf4M,EAAM9G,QAAQ9F,OACjB0d,IAAUA,EAAQzV,OAAO5S;iBAItBqoB;;OAILtxB,iBAAAA,SACNmzB,GACA0B,GACAI;QAGA,KADA,IAAIC,IAAmC1V,aACnBqV,OAAAA,cAAAA,KAClB,KADG,WACoB3D,SAAMR,WAANQ,cAAAA;YAAlB,IAAM/H;YAEPA,aAAoBlW,MACoB,SAAxCgiB,EAAkBjyB,IAAImmB,EAASlgB,SAE/BisB,IAAmCA,EAAiCzW,IAClE0K,EAASlgB;;QAMjB,IAAI8rB,IAAkBE;QACtB,OAAO/0B,KAAK8yB,GACTU,WAAWP,GAAa+B,GACxB3sB,MAAK4sB,SAAAA;mBACJA,EAAgB9uB,SAAQ,SAAC4C,GAAK2K;gBAChB,SAARA,KAAgBA,aAAe3B,OACjC8iB,IAAkBA,EAAgBrZ,GAAOzS,GAAK2K;iBAG3CmhB;;;;IChSb/0B,WACW0a,GACAmG,GACAuU,GACAC;QAHAn1B,gBAAAwa,GACAxa,iBAAA2gB,aACAuU,aACAC;;kBAGXr1B,SACE0a,GACA4a;QAKA,KAHA,IAAIF,IAAY5V,MACZ6V,IAAc7V,aAEM8V,IAAAA,EAAa3U,YAAb2U,cAAAA;YAAnB,IAAMpS;YACT,QAAQA,EAAU5C;cAChB;gBACE8U,IAAYA,EAAU3W,IAAIyE,EAAUtP,IAAI3K;gBACxC;;cACF;gBACEosB,IAAcA,EAAY5W,IAAIyE,EAAUtP,IAAI3K;;;;QAOlD,OAAO,IAAIssB,EACT7a,GACA4a,EAAazU,WACbuU,GACAC;;;ICZJr1B,WACU8P,GACR0lB;QAFFx1B;QACUE,qBAAA4P,GAGJ0lB,MACFA,EAAqBC,KAAwB7a,SAAAA;YAC3C1a,OAAAA,EAAKw1B,GAAiB9a;WACxB1a,KAAKy1B,KAAyB/a,SAAAA;YAC5B4a,OAAAA,EAAqBI,GAAoBhb;;;WAIvC5a,iBAAAA,SACN61B;QAGA,OADA31B,KAAK4P,gBAAgB/K,KAAK+wB,IAAID,GAAuB31B,KAAK4P,gBACnD5P,KAAK4P;OAGd9P,mBAAAA;QACE,IAAM+1B,MAAc71B,KAAK4P;QAIzB,OAHI5P,KAAKy1B,MACPz1B,KAAKy1B,GAAuBI,IAEvBA;;;;;wEA9BTC,UAAiD;;;;;;;;;;;;;;;;;;SCTjDh2B;IAAAA;IACEE,KAAK+1B,UAAU,IAAI/zB,SAAQ,SAACC,GAAsBgwB;QAChDjyB,EAAKiC,UAAUA,GACfjC,EAAKiyB,SAASA;;;ICclBnyB;;;;IAImBk2B;;;;IAIAC;;;;;;IAMAC;;;;UAKAC;;;;;UAMAC;yBAXAF,2BAKAC,2BAMAC;kBArBAJ,aAIAC,aAMAC,aAKAC,aAMAC,GA9BnBp2B,UAAgC,GAChCA,UAAsD;;QAEtDA,UAA0BwE,KAAKC,OA6B7BzE,KAAKq2B;;;;;;;;kBAUPv2B,oBAAAA;QACEE,KAAKs2B,KAAgB;;;;;;IAOvBx2B,iBAAAA;QACEE,KAAKs2B,KAAgBt2B,KAAKo2B;;;;;;;IAQ5Bt2B,iBAAAA,SAAc+Y;QAAd/Y;;gBAEEE,KAAKu2B;;;QAIL,IAAMC,IAA2B3xB,KAAKC,MACpC9E,KAAKs2B,KAAgBt2B,KAAKy2B,OAItBC,IAAe7xB,KAAK+wB,IAAI,GAAGpxB,KAAKC,QAAQzE,KAAK22B,KAG7CC,IAAmB/xB,KAAK+wB,IAC5B,GACAY,IAA2BE;;gBAGzBE,IAAmB,KACrBlR,GAtGU,sBAwGR,qBAAmBkR,0BACD52B,KAAKs2B,kCACCE,4BACLE;QAIvB12B,KAAK62B,KAAe72B,KAAKg2B,GAAMc,GAC7B92B,KAAKi2B,IACLW,IACA;mBACE52B,EAAK22B,KAAkBnyB,KAAKC,OACrBoU;;;;QAMX7Y,KAAKs2B,MAAiBt2B,KAAKm2B,IACvBn2B,KAAKs2B,KAAgBt2B,KAAKk2B,OAC5Bl2B,KAAKs2B,KAAgBt2B,KAAKk2B,KAExBl2B,KAAKs2B,KAAgBt2B,KAAKo2B,OAC5Bp2B,KAAKs2B,KAAgBt2B,KAAKo2B;OAI9Bt2B,iBAAAA;QAC4B,SAAtBE,KAAK62B,OACP72B,KAAK62
B,GAAaE,MAClB/2B,KAAK62B,KAAe;OAIxB/2B,qBAAAA;QAC4B,SAAtBE,KAAK62B,OACP72B,KAAK62B,GAAaN,UAClBv2B,KAAK62B,KAAe;;mFAKhB/2B,iBAAAA;QACN,QAAQ+E,KAAKmyB,WAAW,MAAOh3B,KAAKs2B;;;IC5IxCx2B;QACEE,UAAgC,IAAIi3B;;WAEpCn3B,iBAAAA,SACEmzB,GACAiE;QAGA,OADAl3B,KAAKm3B,GAAsB5Y,IAAI2Y,IACxBlF,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB,GACAxqB;QAEA,OAAOupB,GAAmB/vB,QACxBjC,KAAKm3B,GAAsB3D,WAAW/qB;;;IAU5C3I;QACUE,aAAQ;;;eAKhBF,kBAAAA,SAAIo3B;QAEF,IAAMzuB,IAAeyuB,EAAexiB,KAC9B0iB,IAAaF,EAAe/L,KAC5BkM,IACJr3B,KAAKwG,MAAMiC,MACX,IAAI+V,GAAwBtX,EAAanB,IACrCuxB,KAASD,EAAgB/Y,IAAI8Y;QAEnC,OADAp3B,KAAKwG,MAAMiC,KAAgB4uB,EAAgB9Y,IAAI6Y,IACxCE;OAGTx3B,kBAAAA,SAAIo3B;QACF,IAAMzuB,IAAeyuB,EAAexiB,KAC9B0iB,IAAaF,EAAe/L,KAC5BkM,IAAkBr3B,KAAKwG,MAAMiC;QACnC,OAAO4uB,KAAmBA,EAAgB/Y,IAAI8Y;OAGhDt3B,yBAAAA,SAAW2I;QAIT,QAFEzI,KAAKwG,MAAMiC,MACX,IAAI+V,GAAwBtX,EAAanB,IACxBoB;;;IC7CrBrH,WAAoBy3B;kBAAAA;;WAEpBz3B,mBAAAA;QAEE,OADAE,KAAKu3B,MApBM,GAqBJv3B,KAAKu3B;cAGdz3B;;;;;QAKE,OAAO,IAAI03B,EAAkB;cAG/B13B;;QAEE,OAAO,IAAI03B,GAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;sEC+WjBC,GAA4BzI;;;IAG1C,OAAkB,gCAAXA,EAAEttB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ICpUT5B,WACmB43B,GACRzB,GACA0B,GACQ9e,GACA+e;kBAJAF,aACRzB,aACA0B,GACQ33B,UAAA6Y,aACA+e,GAPnB53B,UAA4B,IAAI63B;QAmFhC73B,YAAOA,KAAK83B,GAAS/B,QAAQhzB,KAAKg1B,KAAK/3B,KAAK83B,GAAS/B;;;;QAvEnD/1B,KAAK83B,GAAS/B,QAAQiC,OAAM1F,SAAAA;;;;;;;;;;;;;;;;kBAiB9BxyB,SACE43B,GACAzB,GACAgC,GACApf,GACA+e;QAEA,IACMM,IAAY,IAAIC,EACpBT,GACAzB,GAHiBzxB,KAAKC,QAAQwzB,GAK9Bpf,GACA+e;QAGF,OADAM,EAAU/Z,MAAM8Z,IACTC;;;;;;IAODp4B,oBAAAA,SAAMm4B;QAANn4B;QACNE,KAAKo4B,KAAcC,YAAW;YAAMr4B,OAAAA,EAAKs4B;YAAsBL;;;;;;IAOjEn4B,iBAAAA;QACE,OAAOE,KAAKs4B;;;;;;;;;IAUdx4B,qBAAAA,SAAOy4B;QACoB,SAArBv4B,KAAKo4B,OACPp4B,KAAKw4B,gBACLx4B,KAAK83B,GAAS7F,OACZ,IAAI7uB,EACFhD,EAAKE,WACL,yBAAyBi4B,IAAS,OAAOA,IAAS;OAQlDz4B,iBAAAA;QAAAA;QACNE,KAAK03B,GAAWe,IAAiB;YACN,OAAA,SAArBz4B,EAAKo4B,MACPp4B,EAAKw4B,gBACEx4B,EAAK6Y,KAAK9V,MAAKwL,SAAAA;gBACbvO,OAAAA,EAAK83B,GAAS71B,QAAQsM;mBAGxBvM,QAAQC;;OAKbnC,2BAAAA;QACmB,SAArBE,KAAKo4B,OACPp4B,KAAK43B,GAAgB53B,OACrBw4B,aAAax4B,KAAKo4B,KAClBp4B,KAAKo4B,KAAc;;;IAuCvBt4B;QAAAA;;gBAhCAE,UAAiCgC,QAAQC;;;QAIzCjC,UAAuCgC,QAAQC;;;QAI/CjC;;;QAIAA,UAA8D;;QAG9DA,UAAwB;;;QAIxBA;;QAGAA,UAAoC;;QAGpCA,UAAkB,IAAI04B,GAAmB14B;;;;QAKzCA,UAA4B;YAAYA,OAAAA,EAAK24B,GAAQC;;QAGnD,IAAMC,IAASrvB,GAAgBC,KAAcovB;QACzCA,KAA6C,qBAA5BA,EAAOC,oBAC1BD,EAAOC,iBAAiB,oBAAoB94B,KAAK+4B;;WAMrDC;;;aAAAA;YACE,OAAOh5B,KAAKi5B;;;;;;;;;IAOdn5B,iBAAAA,SAAoC+Y;;QAElC7Y,KAAKk5B,QAAQrgB;;;;;;IAOf/Y,iBAAAA,SACE+Y;QAEA7Y,KAAKm5B;;QAELn5B,KAAKo5B,GAAgBvgB;;;;;;IAOf/Y,iBAAAA,SACN+Y;QAGA,OADA7Y,KAAKm5B,MACEn5B,KAAKo5B,GAAgBvgB;;;;;;;;;qBAU9B/Y,SAAiC+Y;;;;;;2BAC/B7Y,KAAKm5B,MACAn5B,KAAKi5B,4BACRj5B,KAAKi5B,UACCJ,IAASrvB,GAAgBC,KAAcovB,WAE3CA,EAAOQ,oBAAoB,oBAAoBr5B,KAAK+4B;oCAEhD/4B,KAAKs5B,GAAyBzgB;;;;;;;;;;;;;;;IAQxC/Y,sBAAAA,SAA2B+Y;QAEzB,OADA7Y,KAAKm5B,MACDn5B,KAAKi5B,KAEA,IAAIj3B,SAAWC,SAAAA,UAEjBjC,KAAKo5B,GAAgBvgB;;;;;;;;;;IAW9B/Y,iBAAAA,SAAiB+Y;QAAjB/Y;QACEE,KAAKm5B,MAEDn5B,KAAKi5B,OAITj5B,KAAKu5B,KAAgBv5B,KAAKu5B,GAAcx2B,MAAK;YAC3C,IAAM+0B,IAAW,IAAID,IACf2B,IAAaC;;;;;;+EAET5gB;;;6CACNif,EAAS71B,WACTjC,KAAK24B,GAAQtC;;;4BAEb,KAAIoB,kBAKF,MADAK,EAAS71B,WACH+sB;;+DAJNtJ,GA3TI,cA2Tc,4CAA4CsJ;4BAC9DhvB,KAAK24B,GAAQe,GAAcF;;;;;;;;YAQjC,OADAx5B,EAAKy4B,GAAiBe,IACf1B,EAAS/B;;OAIZj2B,iBAAAA,SAAmC+Y;QAAnC/Y,cACA65B,IAAU35B,KAAK45B,GAAK72B,MAAK;mBAC7B/C,EAAK65B,SACEhhB,IACJmf,OAAOlJ,SAAAA;;;;gBASN,MARA9uB,EAAKivB,KAAUH,GACf9uB,EAAK65B,SAEL3e,GAAS,8BADO4T,EAAMgL,SAAShL,EAAMvtB,WAAW;gBAM1CutB;gBAEP/rB,MAAKwL,SAAAA;uBACJvO,EAAK65B,SACEtrB;;;QAIb,OADAvO,KAAK45B,K
AAOD,GACLA;;;;;;;IAQT75B,iBAAAA,SACEm2B,GACAgC,GACApf;QAHF/Y;QAKEE,KAAKm5B;;QAQDn5B,KAAK+5B,GAAexyB,QAAQ0uB,MAAY,MAC1CgC,IAAU;QAGZ,IAAMC,IAAYC,GAAiB6B,GACjCh6B,MACAi2B,GACAgC,GACApf,IACAohB,SAAAA;YACEj6B,OAAAA,EAAKk6B,GAAuBD;;QAGhC,OADAj6B,KAAKm6B,GAAkB9zB,KAAK6xB,IACrBA;OAGDp4B,iBAAAA;QACFE,KAAKivB,MACPrpB;;;;;;;;IAaJ9F,iBAAAA;;;;;qBAWAA;;;;;;2CAOIs6B,IAAcp6B,KAAK45B;;;;;;wBAEZQ,MAAgBp6B,KAAK45B;;;;;;;;;;;;;IAOhC95B,iBAAAA,SAAyBm2B;QACvB,KAAiBj2B,WAAAA,IAAAA,KAAKm6B,IAALn6B,cAAAA;YACf,SAAOi2B,OAAYA,GACjB;;QAGJ;;;;;;;;;IAUFn2B,iBAAAA,SAA6Bu6B;QAA7Bv6B;;gBAEE,OAAOE,KAAKs6B,KAAQv3B,MAAK;;YAEvB/C,EAAKm6B,GAAkBpsB,MAAK,SAACwsB,GAAGC;gBAAMD,OAAAA,EAAE5C,KAAe6C,EAAE7C;;YAEzD,KAAiB33B,WAAAA,IAAAA,EAAKm6B,IAALn6B,cAAAA;gBAAZ,IAAM6Y;gBAET,IADAA,EAAGke,0BACCsD,KAA+BxhB,EAAGod,OAAYoE,GAChD;;YAIJ,OAAOr6B,EAAKs6B;;;;;;IAOhBx6B,iBAAAA,SAAqBm2B;QACnBj2B,KAAK+5B,GAAe1zB,KAAK4vB;;8DAInBn2B,iBAAAA,SAAuB+Y;;QAE7B,IAAMrS,IAAQxG,KAAKm6B,GAAkB5yB,QAAQsR;QAE7C7Y,KAAKm6B,GAAkB9J,OAAO7pB,GAAO;;;;;;;;SAQzBi0B,GACdzL,GACAT;IAGA,IADArT,GArec,cAqeOqT,WAAQS,IACzByI,GAA4BzI,IAC9B,OAAO,IAAI5rB,EAAehD,EAAKgB,aAAgBmtB,WAAQS;IAEvD,MAAMA;;;;;;;;;;;;;;;;;;;IC1TRlvB;;;IAGW46B;;IAEAC;;;IAGAC;kBALAF,aAEAC,aAGAC;;kBA5BX96B,SAAqB+6B;QACnB,OAAO,IAAIC,EACTD,GACAC,EAAUC,IACVD,EAAUE;;;;AAVdF,SAAuC,GACvCA,QAA2C,SAC3CA,QAA2C,UAC3CA,QAAwD,IACxDA,QAAkE,KAUlEA,QAAqC,IAAIA,GACvCA,GAAUG,IACVH,GAAUC,IACVD,GAAUE;AAGIF,cAAsB,IAAIA,GACxCA,GAAUI,IACV,GACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ICJFp7B;;IAEYq7B,GACFC,GACRC;QAFUr7B,mBAAAm7B,aACFC;;;;;;;QApBVp7B,UAA+B,IAAIub,GACjCtW;;;QAKFjF,UAA2B,IAAIs7B,IAA4BC,SAAAA;YACzDA,OAAAA,EAAEttB;;;;;;;QAQJjO,UAAuCsF,EAAgByB,OAYrD/G,KAAK+yB,KAAgBoI,EAAYK,GAAiBH,IAClDr7B,KAAKy7B,KAAkBN,EAAYO,MACnC17B,KAAK27B,KAAcR,EAAYS,MAC/B57B,KAAK67B,KAAiB,IAAIC,GACxB97B,KAAKy7B,IACLz7B,KAAK+yB,IACL/yB,KAAKm7B,YAAYY;QAEnB/7B,KAAKo7B,GAAYY,GAAsBh8B,KAAK67B;;;WAI9C/7B,oBAAAA;QACE,OAAOkC,QAAQC;;;;;;;;;;qBAWjBnC,SAAuB+B;;;;;;2BACjBo6B,IAAmBj8B,KAAK+yB,IACxBmJ,IAAoBl8B,KAAK67B,oBAER77B,KAAKm7B,YAAYgB,eACpC,sBACA,aACAC,SAAAA;;;wBAGE,IAAIC;wBACJ,OAAOr8B,EAAK+yB,GACTuJ,GAAsBF,GACtB/zB,MAAKk0B,SAAAA;mCACJF,IAAaE,GAEbN,IAAmBj8B,EAAKm7B,YAAYK,GAAiB35B;;;4BAIrDq6B,IAAoB,IAAIJ,GACtB97B,EAAKy7B,IACLQ,GACAj8B,EAAKm7B,YAAYY,OAEZE,EAAiBK,GAAsBF;4BAE/C/zB,MAAKm0B,SAAAA;4BAOJ,KANA,IAAMC,IAA6B,IAC7BC,IAA2B,IAG7BC,IAAcrd,aAEE+c;;8BAAAA,cAAAA,KAAY;gCAA3B,IAAMrL;gCACTyL,EAAgBp2B,KAAK2qB,EAAMV;gCAC3B,KAAuBU,WAAAA,IAAAA,EAAMR,WAANQ,cAAAA;oCAAlB,IAAM/H;oCACT0T,IAAcA,EAAYpe,IAAI0K,EAASlgB;;;4BAI3C,KAAoByzB,WAAAA,OAAAA,cAAAA,KAAY;gCAA3B,IAAMxL;gCACT0L,EAAcr2B,KAAK2qB,EAAMV;gCACzB,KAAuBU,WAAAA,IAAAA,EAAMR,WAANQ,cAAAA;oCAAlB,IAAM/H;oCACT0T,IAAcA,EAAYpe,IAAI0K,EAASlgB;;;;;wDAM3C,OAAOmzB,EACJU,GAAaR,GAAKO,GAClBt0B,MAAKw0B,SAAAA;;oCAEFC,IAAAD;oCACAE,IAAAN;oCACAO,IAAAN;;;;;;;oBAWd,OA/DMnuB,gCA2DNvO,KAAK+yB,KAAgBkJ,GACrBj8B,KAAK67B,KAAiBK,GACtBl8B,KAAKo7B,GAAYY,GAAsBh8B,KAAK67B;oBAErCttB;;;;;yEAITzO,iBAAAA,SAAW0wB;QAAX1wB,IAOMm9B,aANExyB,IAAiBnG,EAAUG,OAC3BoJ,IAAO2iB,EAAU9D,QACrB,SAAC7e,GAAMijB;YAAMjjB,OAAAA,EAAK0Q,IAAIuS,EAAE/nB;YACxBuW;QAKF,OAAOtf,KAAKm7B,YACTgB,eAAe,2BAA2B,cAAaC,SAAAA;YAI/Cp8B,OAAAA,EAAK67B,GAAee,GAAaR,GAAKvuB,GAAMxF,MAAKkY,SAAAA;gBACtD0c,IAAe1c;gBASf;;;;;;gBAFA,IAAMgQ,IAA4B,WAEXC,OAAAA,cAAAA,KAAW;oBAA7B,IAAMvH,UACHtY,IAAYsY,EAASiU,GACzBD,EAAan6B,IAAImmB,EAASlgB;oBAEX,QAAb4H;;;;oBAIF4f,EAAclqB,KACZ,IAAI0M,GACFkW,EAASlgB,KACT4H,GACAiF,GAAiBjF,EAAU8D,MAAerK,WAC1CwH,GAAaD;;gBAMrB,OAAO3R,EAAK+yB,GAAcoK,GACxBf,GACA3xB,GACA8lB,GACAC;;YAILztB,MAAKiuB,SAAAA;YACJ,IAAM3Q,IAAU2Q,EAAMoM,GAAwBH;YAC9C,OAAO;gBAAE3M,SAASU,EAAMV;gBAAS+M,IAA
Ahd;;;;;;;;;;;;;;;;;;IAkBvCvgB,iBAAAA,SACE4wB;QADF5wB;QAGE,OAAOE,KAAKm7B,YAAYgB,eACtB,qBACA,sBACAC,SAAAA;YACE,IAAMkB,IAAW5M,EAAYM,MAAMnjB,QAC7B0vB,IAAiBv9B,EAAKy7B,GAAgB+B,GAAgB;gBAC1DC;;YAEF,OAAOz9B,EAAK+yB,GACT2K,GAAiBtB,GAAK1L,EAAYM,OAAON,EAAYQ,aACrD7oB,MAAK;gBACJrI,OAAAA,EAAK29B,GAA4BvB,GAAK1L,GAAa6M;gBAEpDl1B,MAAK;gBAAMk1B,OAAAA,EAAettB,MAAMmsB;gBAChC/zB,MAAK;gBAAMrI,OAAAA,EAAK+yB,GAAc6K,GAAwBxB;gBACtD/zB,MAAK;gBAAMrI,OAAAA,EAAK67B,GAAee,GAAaR,GAAKkB;;;;;;;;;;IAW1Dx9B,iBAAAA,SAAYwwB;QAAZxwB;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,gBACA,sBACAC,SAAAA;YACE,IAAIyB;YACJ,OAAO79B,EAAK+yB,GACT+K,GAAoB1B,GAAK9L,GACzBjoB,MAAM2oB,SAAAA;uBA1WU3tB,GA2WM,SAAV2tB,IACX6M,IAAe7M,EAAMnjB,QACd7N,EAAK+yB,GAAcgL,GAAoB3B,GAAKpL;gBAEpD3oB,MAAK;gBACGrI,OAAAA,EAAK+yB,GAAc6K,GAAwBxB;gBAEnD/zB,MAAK;gBACGrI,OAAAA,EAAK67B,GAAee,GAAaR,GAAKyB;;;;;;;;IAUvD/9B,iBAAAA;QAAAA;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,uCACA,aACAC,SAAAA;YACSp8B,OAAAA,EAAK+yB,GAAciL,GAAgC5B;;;wEAMhEt8B,iBAAAA;QAAAA;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,yBACA,aACAC,SAAAA;YACSp8B,OAAAA,EAAK+yB,GAAckL,GAAmB7B;;;;;;;;IAUnDt8B,iBAAAA,SAAmBoxB;QAAnBpxB;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,yBACA,sBACAC,SAAAA;YACSp8B,OAAAA,EAAK+yB,GAAcmL,GAAmB9B,GAAKlL;;;;;;;IASxDpxB,iBAAAA;QAAAA;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,oCACA,aACAC,SAAAA;YAAOp8B,OAAAA,EAAK27B,GAAYwC,GAA6B/B;;;;;;;;;;;IAYzDt8B,iBAAAA,SAAiBilB;QAAjBjlB,cACQs+B,IAAgBrZ,EAAYpK,IAC9B0jB,IAA2Br+B,KAAKs+B;QAEpC,OAAOt+B,KAAKm7B,YACTgB,eAAe,sBAAsB,sBAAqBC,SAAAA;YACzD,IAAMmB,IAAiBv9B,EAAKy7B,GAAgB+B,GAAgB;gBAC1DC;;;wBAIFY,IAA2Br+B,EAAKs+B;YAEhC,IAAM3L,IAAW;YACjB5N,EAAY7D,GAAc/a,SAAQ,SAAC8Z,GAAQzF;gBACzC,IAAM+jB,IAAgBF,EAAyBv7B,IAAI0X;gBACnD,IAAK+jB,GAAL;;;;oBAOA5L,EAAStsB,KACPrG,EAAK27B,GACF6C,GAAmBpC,GAAKnc,EAAO0B,IAAkBnH,GACjDnS,MAAK;wBACGrI,OAAAA,EAAK27B,GAAY8C,GACtBrC,GACAnc,EAAOwB,IACPjH;;oBAKR,IAAMK,IAAcoF,EAAOpF;;wCAE3B,IAAIA,EAAY8H,OAAwB,GAAG;wBACzC,IAAM+b,IAAgBH,EACnBI,GAAgB9jB,GAAaujB,GAC7BQ,GAAmBxC,EAAIyC;wBAC1BR,IAA2BA,EAAyB7iB,GAClDhB,GACAkkB;;;wBAMAI,EAAWC,GACTR,GACAG,GACAze,MAGF0S,EAAStsB,KACPrG,EAAK27B,GAAYqD,GAAiB5C,GAAKsC;;;;YAM/C,IAAIO,IAAclgB,MACdmgB,IAAc5f;;;;;wBAiElB,IAhEAyF,EAAY3D,GAAgBjb,SAAQ,SAAC4C,GAAK2K;gBACxCwrB,IAAcA,EAAY3gB,IAAIxV;;;;YAKhC4pB,EAAStsB,KACPk3B,EAAe/J,WAAW4I,GAAK8C,GAAa72B,MAAK40B,SAAAA;gBAC/ClY,EAAY3D,GAAgBjb,SAAQ,SAAC4C,GAAK2K;oBACxC,IAAMyrB,IAAclC,EAAan6B,IAAIiG;;;;;wCAOnC2K,aAAeY,MACfZ,EAAIlC,QAAQhM,QAAQF,EAAgByB;;;;oBAKpCw2B,EAAe6B,GAAYr2B,GAAKq1B,IAChCa,IAAcA,EAAYzjB,GAAOzS,GAAK2K,MAEvB,QAAfyrB,KACAzrB,EAAIlC,QAAQrE,EAAUgyB,EAAY3tB,WAAW,KACG,MAA/CkC,EAAIlC,QAAQrE,EAAUgyB,EAAY3tB,YACjC2tB,EAAY9oB,oBAMdknB,EAAe8B,GAAS3rB,GAAK0qB;oBAC7Ba,IAAcA,EAAYzjB,GAAOzS,GAAK2K,MAEtCgS,GAhgBA,cAkgBE,uCACA3c,GACA,sBACAo2B,EAAY3tB,SACZ,mBACAkC,EAAIlC;oBAIJuT,EAAY1D,GAAuB/C,IAAIvV,MACzC4pB,EAAStsB,KACPrG,EAAKm7B,YAAYmE,GAAkBC,GACjCnD,GACArzB;;mBAYPq1B,EAAc54B,QAAQF,EAAgByB,QAAQ;gBACjD,IAAMy4B,IAAsBx/B,EAAK27B,GAC9BwC,GAA6B/B,GAC7B/zB,MAAKo3B,SAAAA;oBAQGz/B,OAAAA,EAAK27B,GAAY+D,GACtBtD,GACAA,EAAIyC,IACJT;;gBAGNzL,EAAStsB,KAAKm5B;;YAGhB,OAAOxN,GAAmBa,GAAQF,GAC/BtqB,MAAK;gBAAMk1B,OAAAA,EAAettB,MAAMmsB;gBAChC/zB,MAAK;gBACGrI,OAAAA,EAAK67B,GAAepI,GACzB2I,GACA6C;;YAIPl8B,MAAKk8B,SAAAA;mBACJj/B,EAAKs+B,KAAqBD,GACnBY;;;;;;;;;;;;;;WAeLn/B,SACNy+B,GACAG,GACAze;;QAQA,OANA5c,GACEq7B,EAAc7jB,YAAY8H,OAAwB,IAKI,MAApD4b,EAAc1jB,YAAY8H,SAU5B+b,EAAc/jB,GAAgBglB,MAC9BpB,EAAc5jB,GAAgBglB,OACf3/B,KAAK4/B,MAUpB3f,EAAOwB,GAAelb,OACtB0Z,EAAOyB,GAAkBnb,OACzB0Z,EAAO0B,GAAiBpb,OACT;;;;;;;;;;qBAMnBzG,SAA6B+/B;;;;;;uEAEnB7/B,KAAKm7B,YAAYgB,eACrB,0BACA,cACAC,SAAAA;wBACSpK,OAAAA,GAAmB7rB,QACxB05B,IACCC,SAAAA;4BACQ9N,OAAAA,GAAmB7
rB,QACxB25B,EAAW5K,KACVnsB,SAAAA;gCACC/I,OAAAA,EAAKm7B,YAAYmE,GAAkBS,GACjC3D,GACA0D,EAAWtlB,UACXzR;gCAEJV,MAAK;gCACL2pB,OAAAA,GAAmB7rB,QACjB25B,EAAW3K,KACVpsB,SAAAA;oCACC/I,OAAAA,EAAKm7B,YAAYmE,GAAkBU,GACjC5D,GACA0D,EAAWtlB,UACXzR;;;;;;;;;;oBAShB,KAAI0uB,kBAOF,MAAMzI;;;;;+CAFNtJ,GAzpBQ,cAypBU,wCAAwCsJ;;;;oBAM9D,YAAyB6Q,OAAAA,cAAAA,KAApBI,UACGzlB,IAAWslB,EAAWtlB,UAEvBslB,EAAWnf,cACRwD,IAAankB,KAAKs+B,GAAmBx7B,IAAI0X;oBAOzCI,IAA+BuJ,EAAWxJ,IAC1CulB,IAAoB/b,EAAWgc,GACnCvlB;;oBAEF5a,KAAKs+B,KAAqBt+B,KAAKs+B,GAAmB9iB,GAChDhB,GACA0lB;;;;;;;;;;;;IAYRpgC,iBAAAA,SAAkBsgC;QAAlBtgC;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,2BACA,aACAC,SAAAA;8BACMgE,MACFA,KZtuBqB,IYwuBhBpgC,EAAK+yB,GAAcsN,GACxBjE,GACAgE;;;;;;;IAURtgC,iBAAAA,SAAaiJ;QAAbjJ;QACE,OAAOE,KAAKm7B,YAAYgB,eAAe,iBAAiB,aAAYC,SAAAA;YAC3Dp8B,OAAAA,EAAK67B,GAAe3H,GAAYkI,GAAKrzB;;;;;;;;;;;IAYhDjJ,iBAAAA,SAAeya;QAAfza;QACE,OAAOE,KAAKm7B,YACTgB,eAAe,mBAAmB,cAAaC,SAAAA;YAC9C,IAAIjY;YACJ,OAAOnkB,EAAK27B,GACT2E,GAAclE,GAAK7hB,GACnBlS,MAAMk4B,SAAAA;gBACDA,OAAAA;;;;gBAIFpc,IAAaoc,GACNvO,GAAmB/vB,QAAQkiB,MAE3BnkB,EAAK27B,GAAY6E,GAAiBpE,GAAK/zB,MAAKmS,SAAAA;2BACjD2J,IAAa,IAAIpJ,GACfR,GACAC,oBAEA4hB,EAAIyC,KAEC7+B,EAAK27B,GACT8E,GAAcrE,GAAKjY,GACnB9b,MAAK;wBAAM8b,OAAAA;;;;YAKvBphB,MAAKohB,SAAAA;mBACqD,SAArDnkB,EAAKs+B,GAAmBx7B,IAAIqhB,EAAW3J,cACzCxa,EAAKs+B,KAAqBt+B,EAAKs+B,GAAmB9iB,GAChD2I,EAAW3J,UACX2J,IAEFnkB,EAAK0gC,GAAiBptB,IAAIiH,GAAQ4J,EAAW3J;YAExC2J;;;;;;;;IASbrkB,iBAAAA,SACEmzB,GACA1Y;QAEA,IAAMC,IAAWxa,KAAK0gC,GAAiB59B,IAAIyX;QAC3C,kBAAIC,IACKwX,GAAmB/vB,QACxBjC,KAAKs+B,GAAmBx7B,IAAI0X,MAGvBxa,KAAK27B,GAAY2E,GAAcrN,GAAa1Y;;;;;;;;;;IAYvDza,iBAAAA,SACE0a,GACAmmB;QAFF7gC,cAIQqkB,IAAankB,KAAKs+B,GAAmBx7B,IAAI0X,IAMzComB,IAAOD,IAA0B,cAAc;QACrD,OAAO3gC,KAAKm7B,YACTgB,eAAe,kBAAkByE,IAAMxE,SAAAA;YACjCuE,OAAAA,IAMI3O,GAAmB/vB,YALnBjC,EAAKm7B,YAAYmE,GAAkB1b,aACxCwY;YAOLr5B,MAAK;YACJ/C,EAAKs+B,KAAqBt+B,EAAKs+B,GAAmB3iB,OAAOnB,IACzDxa,EAAK0gC,GAAiBntB,OAAO4Q,EAAY5J;;;;;;;;;;;IAY/Cza,iBAAAA,SACEwgB,GACAugB;QAFF/gC,cAIM8a,IAA+BtV,EAAgByB,OAC/C+5B,IAAaxhB;QAEjB,OAAOtf,KAAKm7B,YAAYgB,eAAe,iBAAiB,aAAYC,SAAAA;YAC3Dp8B,OAAAA,EAAKsgC,GAAclE,GAAK9b,EAAMrI,MAClC5P,MAAK8b,SAAAA;gBACJ,IAAIA,GAGF,OAFAvJ,IACEuJ,EAAWvJ,8BACN5a,EAAK27B,GACToF,GAA2B3E,GAAKjY,EAAW3J,UAC3CnS,MAAKkG,SAAAA;oBACJuyB,IAAavyB;;gBAIpBlG,MAAK;gBACJrI,OAAAA,EAAKo7B,GAAY5G,GACf4H,GACA9b,GACAugB,IACIjmB,IACAtV,EAAgByB,OACpB85B,IAAqBC,IAAaxhB;gBAGrCjX,MAAKyY,SAAAA;;oBACKA,WAAAA;oBAAWkgB,IAAAF;;;;OAKpBhhC,iBAAAA,SACNs8B,GACA1L,GACA6M;QAHMz9B,cAKAkxB,IAAQN,EAAYM,OACpBiQ,IAAUjQ,EAAMnjB,QAClBqzB,IAAelP,GAAmB/vB;QAiCtC,OAhCAg/B,EAAQ96B,SAAQsqB,SAAAA;YACdyQ,IAAeA,EACZ74B,MAAK;gBACGk1B,OAAAA,EAAejK,GAAS8I,GAAK3L;gBAErCpoB,MAAM84B,SAAAA;gBACL,IAAIztB,IAAMytB,GACJC,IAAa1Q,EAAYS,GAAYruB,IAAI2tB;gBAl5B9BptB,GAo5BA,SAAf+9B,MAGG1tB,KAAOA,EAAIlC,QAAQrE,OAAyB,QAC/CuG,IAAMsd,EAAM5c,GAAsBqc,GAAQ/c,GAAKgd;;;;gBAc7C6M,EAAe8B,GAAS3rB,GAAKgd,EAAYO;;aAK5CiQ,EAAa74B,MAAK;YACvBrI,OAAAA,EAAK+yB,GAAcgL,GAAoB3B,GAAKpL;;OAIhDlxB,iBAAAA,SAAeuhC;QAAfvhC;QACE,OAAOE,KAAKm7B,YAAYgB,eACtB,mBACA,sBACAC,SAAAA;YAAOiF,OAAAA,EAAiBC,GAAQlF,GAAKp8B,EAAKs+B;;;;;;;;;;;;;;;;;;;;;GAgIzC7E,UAAe8H,GACpBjP;;;YAEA,IACEA,EAAIhxB,SAASlB,EAAKW,uBCvkCpB,gIDwkCEuxB,EAAI/wB,SAIJ,MAAM+wB;mBAFN5M,GAjiCY,cAiiCM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;YAj9BkC;;;IEhHxD5lB;;QAEEE,UAAoB,IAAIwe,GAAUgjB,GAAaC;;QAG/CzhC,UAAuB,IAAIwe,GAAUgjB,GAAaE;;;WAGlD5hC,gBAAAA;QACE,OAAOE,KAAK2hC,GAAUv4B;;wEAIxBtJ,iBAAAA,SAAaiJ,GAAkBmnB;QAC7B,IAAM0R,IAAM,IAAIJ,GAAaz4B,GAAKmnB;QAClClwB,KAAK2hC,KAAY3hC,KAAK2hC,GAAUpjB,IAAIqjB,IACpC5hC,KAAK6hC,KAAe7hC,KAAK6hC,GAAatjB,IAAIqjB;;uEAI5C9hC,iBAAAA,SAAc+N,GAAsB
qiB;QAApCpwB;QACE+N,EAAK1H,SAAQ4C,SAAAA;YAAO/I,OAAAA,EAAK+/B,GAAah3B,GAAKmnB;;;;;;;IAO7CpwB,iBAAAA,SAAgBiJ,GAAkBmnB;QAChClwB,KAAK8hC,GAAU,IAAIN,GAAaz4B,GAAKmnB;OAGvCpwB,iBAAAA,SAAiB+N,GAAsBqiB;QAAvCpwB;QACE+N,EAAK1H,SAAQ4C,SAAAA;YAAO/I,OAAAA,EAAKggC,GAAgBj3B,GAAKmnB;;;;;;;IAOhDpwB,iBAAAA,SAAsBowB;QAAtBpwB,cACQiiC,IAAWz5B,EAAYgT,OACvB0mB,IAAW,IAAIR,GAAaO,GAAU7R,IACtC+R,IAAS,IAAIT,GAAaO,GAAU7R,IAAK,IACzCriB,IAAsB;QAK5B,OAJA7N,KAAK6hC,GAAaK,GAAe,EAACF,GAAUC,MAASL,SAAAA;YACnD5hC,EAAK8hC,GAAUF,IACf/zB,EAAKxH,KAAKu7B,EAAI74B;aAET8E;OAGT/N,iBAAAA;QAAAA;QACEE,KAAK2hC,GAAUx7B,SAAQy7B,SAAAA;YAAO5hC,OAAAA,EAAK8hC,GAAUF;;OAGvC9hC,iBAAAA,SAAU8hC;QAChB5hC,KAAK2hC,KAAY3hC,KAAK2hC,GAAUpuB,OAAOquB,IACvC5hC,KAAK6hC,KAAe7hC,KAAK6hC,GAAatuB,OAAOquB;OAG/C9hC,iBAAAA,SAAgBowB;QACd,IAAM6R,IAAWz5B,EAAYgT,OACvB0mB,IAAW,IAAIR,GAAaO,GAAU7R,IACtC+R,IAAS,IAAIT,GAAaO,GAAU7R,IAAK,IAC3CriB,IAAOyR;QAIX,OAHAtf,KAAK6hC,GAAaK,GAAe,EAACF,GAAUC,MAASL,SAAAA;YACnD/zB,IAAOA,EAAK0Q,IAAIqjB,EAAI74B;aAEf8E;OAGT/N,iBAAAA,SAAYiJ;QACV,IAAM64B,IAAM,IAAIJ,GAAaz4B,GAAK,IAC5Bo5B,IAAWniC,KAAK2hC,GAAUS,GAAkBR;QAClD,OAAoB,SAAbO,KAAqBp5B,EAAIvD,QAAQ28B,EAASp5B;;;IAKnDjJ,WACSiJ,GACAs5B;QADAriC,WAAA+I,aACAs5B;;;kBAITviC,SAAoBkH,GAAoBC;QACtC,OACEqB,EAAYvC,EAAWiB,EAAK+B,KAAK9B,EAAM8B,QACvC9D,GAAoB+B,EAAKq7B,IAAiBp7B,EAAMo7B;;4CAKpDviC,SAAyBkH,GAAoBC;QAC3C,OACEhC,GAAoB+B,EAAKq7B,IAAiBp7B,EAAMo7B,OAChD/5B,EAAYvC,EAAWiB,EAAK+B,KAAK9B,EAAM8B;;;;;;;;;;;;;;;;;;;;;;;;;;;SClG7Bu5B,GAAeC,GAAsB7T;IACnD,IAAoB,MAAhBA,EAAK/oB,QACP,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,cAAY+hC,4DAEVC,GAAa9T,EAAK/oB,QAAQ,cAC1B;;;;;;;;;aAYQ88B,GACdF,GACA7T,GACAgU;IAEA,IAAIhU,EAAK/oB,WAAW+8B,GAClB,MAAM,IAAIt/B,EACRhD,EAAKI,kBACL,cAAY+hC,qBACVC,GAAaE,GAAc,cAC3B,2BACAF,GAAa9T,EAAK/oB,QAAQ,cAC1B;;;;;;;;;;aAaQg9B,GACdJ,GACA7T,GACAkU;IAEA,IAAIlU,EAAK/oB,SAASi9B,GAChB,MAAM,IAAIx/B,EACRhD,EAAKI,kBACL,cAAY+hC,8BACVC,GAAaI,GAAiB,cAC9B,2BACAJ,GAAa9T,EAAK/oB,QAAQ,cAC1B;;;;;;;;;;aAaQk9B,GACdN,GACA7T,GACAkU,GACAE;IAEA,IAAIpU,EAAK/oB,SAASi9B,KAAmBlU,EAAK/oB,SAASm9B,GACjD,MAAM,IAAI1/B,EACRhD,EAAKI,kBACL,cAAY+hC,6BAAmCK,cAC1CE,yCACHN,GAAa9T,EAAK/oB,QAAQ,cAC1B;;;;;;;;;;aA6BQo9B,GACdR,GACAniB,GACAjH,GACA6pB;IAEAC,GAAaV,GAAcniB,GAAS8iB,GAAQ/pB,kBAAsB6pB;;;;;;aAOpDG,GACdZ,GACAniB,GACAjH,GACA6pB;eAEIA,KACFD,GAAgBR,GAAcniB,GAAMjH,GAAU6pB;;;;;;aAQlCI,GACdb,GACAniB,GACAijB,GACAL;IAEAC,GAAaV,GAAcniB,GAASijB,eAAqBL;;;;;;aAO3CM,GACdf,GACAniB,GACAijB,GACAL;eAEIA,KACFI,GAAkBb,GAAcniB,GAAMijB,GAAYL;;;;;;;;;;SAgFtCO,GACdhB,GACAiB,GACAH,GACAI,GACAC;eAEID,cAlCJlB,GACAiB,GACAH,GACAI,GACAC;QAIA,KAFA,IAAMC,IAAgC,WAEpBD,OAAAA,cAAAA,KAAU;YAAvB,IAAM1pB;YACT,IAAIA,MAAQypB,GACV;YAEFE,EAAoBt9B,KAAKu9B,GAAiB5pB;;QAG5C,IAAM6pB,IAAoBD,GAAiBH;QAC3C,MAAM,IAAIrgC,EACRhD,EAAKI,kBACL,mBAAiBqjC,+BAA0CtB,wBACrDc,+BAAmCM,EAAoBv8B,KAAK;MAiBhEm7B,GACAiB,GACAH,GACAI,GACAC;;;;;;;;;;;;uDAgCN,UAAST,GACPV,GACAniB,GACAojB,GACAC;IAWA,MARa,aAATrjB,IACM0jB,GAAcL,KACJ,uBAATrjB,IACgB,mBAAVqjB,KAAgC,OAAVA,WAEtBA,MAAUrjB,IAGf;QACV,IAAM2jB,IAAcH,GAAiBH;QACrC,MAAM,IAAIrgC,EACRhD,EAAKI,kBACL,cAAY+hC,yBAA+BiB,wBACxBpjB,uBAAqB2jB;;;;;;;aAS9BD,GAAcL;IAC5B,OACmB,mBAAVA,KACG,SAAVA,MACCz6B,OAAOg7B,eAAeP,OAAWz6B,OAAOC,aACN,SAAjCD,OAAOg7B,eAAeP;;;oFAKZG,GAAiBH;IAC/B,eAAIA,GACF,OAAO;IACF,IAAc,SAAVA,GACT,OAAO;IACF,IAAqB,mBAAVA,GAIhB,OAHIA,EAAM99B,SAAS,OACjB89B,IAAWA,EAAMQ,UAAU,GAAG;IAEzBC,KAAKC,UAAUV;IACjB,IAAqB,mBAAVA,KAAuC,oBAAVA,GAC7C,OAAO,KAAKA;IACP,IAAqB,mBAAVA,GAAoB;QACpC,IAAIA,aAAiBW,OACnB,OAAO;QAEP,IAAMC;;iBAe2BZ;YACrC,IAAIA,EAAMrU,aAAa;gBACrB,IACMgC,IADgB,4BACQziB,KAAK80B,EAAMrU,YAAY3tB;gBACrD,IAAI2vB,KAAWA,EAAQzrB,SAAS,GAC9B,OAAOyrB,EAAQ;;YAGnB,OAAO;;QAtBH,OA
AIiT,IACK,cAAYA,gBAEZ;;IAGN,OAAqB,qBAAVZ,IACT,eA1VX79B;;;SA6WgB0+B,GACd/B,GACAppB,GACA6pB;IAEA,eAAIA,GACF,MAAM,IAAI5/B,EACRhD,EAAKI,kBACL,cAAY+hC,6BAAmCW,GAAQ/pB;;;;;;aAU7CorB,GACdhC,GACArsB,GACAsuB;IAEAr+B,EAAQ+P,IAA0B,SAACnN,GAAKib;QACtC,IAAIwgB,EAAYj9B,QAAQwB,KAAO,GAC7B,MAAM,IAAI3F,EACRhD,EAAKI,kBACL,qBAAmBuI,8BAA2Bw5B,gCAE5CiC,EAAYp9B,KAAK;;;;;;;aAUXq9B,GACdlC,GACAniB,GACAjH,GACA6pB;IAEA,IAAMe,IAAcH,GAAiBZ;IACrC,OAAO,IAAI5/B,EACThD,EAAKI,kBACL,cAAY+hC,yBAA+BW,GAAQ/pB,4BAC7BiH,uBAAqB2jB;;;SAI/BW,GACdnC,GACAppB,GACA0D;IAEA,IAAIA,KAAK,GACP,MAAM,IAAIzZ,EACRhD,EAAKI,kBACL,cAAY+hC,yBAA+BW,GACzC/pB,yDACiD0D;;;qEAMhDqmB,GAAQyB;IACf,QAAQA;MACN,KAAK;QACH,OAAO;;MACT,KAAK;QACH,OAAO;;MACT,KAAK;QACH,OAAO;;MACT;QACE,OAAOA,IAAM;;;;;;aAOVnC,GAAamC,GAAa78B;IACjC,OAAU68B,UAAO78B,KAAiB,MAAR68B,IAAY,KAAK;;;;;;;;;;;;;;;;;;;8EC7cpCC;IACP,IAA0B,sBAAf76B,YACT,MAAM,IAAI3G,EACRhD,EAAKc,eACL;;;qFAMG2jC;IACP,KAAKr7B,GAAgBC,KAAcq7B,IACjC,MAAM,IAAI1hC,EACRhD,EAAKc,eACL;;;;;;;;;;IAiBJpB,WAAYilC;QACVF,MACA7kC,KAAKglC,KAAcD;;gCAGrBjlC,SAAwBwJ;QACtBm5B,GAA0B,yBAAyBwC,WAAW,IAC9DlC,GAAgB,yBAAyB,UAAU,GAAGz5B;QACtDu7B;QACA;YACE,OAAO,IAAIK,EAAK37B,EAAW0F,iBAAiB3F;UAC5C,OAAO0lB;YACP,MAAM,IAAI5rB,EACRhD,EAAKI,kBACL,kDAAkDwuB;;0BAKxDlvB,SAAsB6J;QAGpB,IAFA84B,GAA0B,uBAAuBwC,WAAW,IAC5DL,QACMj7B,aAAiBI,aACrB,MAAM06B,GAAkB,uBAAuB,cAAc,GAAG96B;QAElE,OAAO,IAAIu7B,EAAK37B,EAAW2F,eAAevF;OAG5C7J,uBAAAA;QAGE,OAFA2iC,GAA0B,iBAAiBwC,WAAW,IACtDJ,MACO7kC,KAAKglC,GAAY52B;OAG1BtO,2BAAAA;QAGE,OAFA2iC,GAA0B,qBAAqBwC,WAAW,IAC1DL,MACO5kC,KAAKglC,GAAYje;OAG1BjnB,uBAAAA;QACE,OAAO,kBAAkBE,KAAKoO,aAAa;OAG7CtO,sBAAAA,SAAQkF;QACN,OAAOhF,KAAKglC,GAAYx/B,QAAQR,EAAMggC;;;;;;;;;ICzDxCllC;;kBFkFAyiC,GACA3gC,GACAF,GACAyjC;YAEA,MAAMvjC,aAAiBwiC,UAAUxiC,EAAM+D,SElFnC,GFmFF,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,wFAEKgiC,GEvFL,GFuFuC;UE1FvC,GACA4C;QAKF,KAAK,IAAI3+B,IAAI,GAAGA,IAAI2+B,EAAWz/B,UAAUc,GAEvC,IADAs8B,GAAgB,aAAa,UAAUt8B,GAAG2+B,EAAW3+B,KACxB,MAAzB2+B,EAAW3+B,GAAGd,QAChB,MAAM,IAAIvC,EACRhD,EAAKI,kBACL;QAMNR,KAAKqlC,KAAgB,IAAIC,EAAkBF;;0BAa7CtlC;QACE,OAAO6H,EAAU49B;OAGnBzlC,sBAAAA,SAAQkF;QACN,MAAMA,aAAiB2C,IACrB,MAAM88B,GAAkB,WAAW,aAAa,GAAGz/B;QAErD,OAAOhF,KAAKqlC,GAAc7/B,QAAQR,EAAMqgC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;QAZH,IAAI19B,GACzC29B,EAAkB7tB,IAAWpQ;;;;;AAkBjC,IAAMm+B,KAAW,IAAIz6B,OAAO,uBCtD1BjL,SAA+B2lC;cAAAA;;IAQ/B3lC;eACE0B,aAAM;;WAFgCkkC,SAKxC5lC,iBAAAA,SAAiB6lC;QACf,yBAAIA,EAAQC,IAIL,yBAAID,EAAQC,KAMXD,EAAQE,GACZ,8EAKIF,EAAQE,GACZ;;;gBAIJ,OAlBEF,EAAQhzB,GAAUtM,KAAKs/B,EAAar+B,OAkB/B;OAGTxH,sBAAAA,SAAQkF;QACN,OAAOA,aAAiB8gC;;EA/BcJ;IAoCxC5lC;eACE0B,aAAM;;WAFyCkkC,SAKjD5lC,iBAAAA,SAAiB6lC;QACf,OAAO,IAAIjb,GAAeib,EAAar+B,MAAEyI,GAAyBya;OAGpE1qB,sBAAAA,SAAQkF;QACN,OAAOA,aAAiB+gC;;EAVuBL;IAejD5lC,WAA6BkmC;QAA7BlmC;gBACE0B,IAAAA,aAAM,uCADqBwkC;;WADeN,SAK5C5lC,iBAAAA,SAAiB6lC;;;;QAIf,IAAMM,IAAe,IAAIC,GACvB;YACEC;YACAC,YAAYpmC,KAAKylC;YACjBY;WAEFV,EAAQv2B,IACRu2B,EAAQl1B,YACRk1B,EAAQW,4BAEJC,IAAiBvmC,KAAKgmC,GAAUn+B,KACpCwI,SAAAA;YAAWm2B,OAAAA,GAAUn2B,GAAS41B;aAE1BQ,IAAa,IAAIn2B,GAA6Bi2B;QACpD,OAAO,IAAI7b,GAAeib,EAAQr+B,MAAOm/B;OAG3C3mC,sBAAAA,SAAQkF;;QAEN,OAAOhF,SAASgF;;EA5B0B0gC;IAiC5C5lC,WAAqBkmC;QAArBlmC;gBACE0B,IAAAA,aAAM,wCADawkC;;WADwBN,SAK7C5lC,iBAAAA,SAAiB6lC;;;;QAIf,IAAMM,IAAe,IAAIC,GACvB;YACEC;YACAC,YAAYpmC,KAAKylC;YACjBY;WAEFV,EAAQv2B,IACRu2B,EAAQl1B,YACRk1B,EAAQW,4BAEJC,IAAiBvmC,KAAKgmC,GAAUn+B,KACpCwI,SAAAA;YAAWm2B,OAAAA,GAAUn2B,GAAS41B;aAE1BQ,IAAa,IAAIj2B,GAA8B+1B;QACrD,OAAO,IAAI7b,GAAeib,EAAQr+B,MAAOm/B;OAG3C3mC,sBAAAA,SAAQkF;;QAEN,OAAOhF,SAASgF;;EA5B2B0gC;IAiC7C5lC,WAA6B4mC;QAA7B5mC;gBACE0B,IAAAA,aAAM,sCADqBklC;;WADqBhB,SAKlD5lC,iBAAAA,SAAiB6l
C;QACf,IAAMM,IAAe,IAAIC,GACvB;YACEC;YACAC,YAAYpmC,KAAKylC;WAEnBE,EAAQv2B,IACRu2B,EAAQl1B,YACRk1B,EAAQW,4BAEJ51B,IAAU81B,GAAUxmC,KAAK0mC,IAAUT,IACnCU,IAAmB,IAAI11B,GAC3B00B,EAAQl1B,YACRC;QAEF,OAAO,IAAIga,GAAeib,EAAar+B,MAAEq/B;OAG3C7mC,sBAAAA,SAAQkF;;QAEN,OAAOhF,SAASgF;;EAzBgC0gC;IC7HlD5lC,WAAY6L,GAAkBC;QAI5B,IAHA62B,GAA0B,YAAYwC,WAAW,IACjDlC,GAAgB,YAAY,UAAU,GAAGp3B,IACzCo3B,GAAgB,YAAY,UAAU,GAAGn3B;SACpCg7B,SAASj7B,MAAaA,KAAY,MAAMA,IAAW,IACtD,MAAM,IAAIvI,EACRhD,EAAKI,kBACL,4DAA4DmL;QAGhE,KAAKi7B,SAASh7B,MAAcA,KAAa,OAAOA,IAAY,KAC1D,MAAM,IAAIxI,EACRhD,EAAKI,kBACL,+DAA+DoL;QAInE5L,KAAK6mC,KAAOl7B,GACZ3L,KAAK8mC,KAAQl7B;;WAMfD;;;;aAAAA;YACE,OAAO3L,KAAK6mC;;;;QAMdj7B;;;;aAAAA;YACE,OAAO5L,KAAK8mC;;;;QAGdhnC,sBAAAA,SAAQkF;QACN,OAAOhF,KAAK6mC,OAAS7hC,EAAM6hC,MAAQ7mC,KAAK8mC,OAAU9hC,EAAM8hC;;;;;;IAO1DhnC,gBAAAA,SAAWkF;QACT,OACEC,GAAoBjF,KAAK6mC,IAAM7hC,EAAM6hC,OACrC5hC,GAAoBjF,KAAK8mC,IAAO9hC,EAAM8hC;;KC9BtCC,KAAuB;IAI3BjnC,WACW4S,GACAC,GACAc;QAFAzT,YAAA0S,aACAC,GACA3S,uBAAAyT;;WAGX3T,iBAAAA,SAAYiJ,GAAkBiJ;QAC5B,IAAMwe,IAAY;QAWlB,OAVuB,SAAnBxwB,KAAK2S,KACP6d,EAAUnqB,KACR,IAAI0M,GAAchK,GAAK/I,KAAK0S,MAAM1S,KAAK2S,IAAWX,MAGpDwe,EAAUnqB,KAAK,IAAIoM,GAAY1J,GAAK/I,KAAK0S,MAAMV;QAE7ChS,KAAKyT,gBAAgB9N,SAAS,KAChC6qB,EAAUnqB,KAAK,IAAI8N,GAAkBpL,GAAK/I,KAAKyT,mBAE1C+c;;;IAMT1wB,WACW4S,GACAC,GACAc;QAFAzT,YAAA0S,aACAC,GACA3S,uBAAAyT;;WAGX3T,iBAAAA,SAAYiJ,GAAkBiJ;QAC5B,IAAMwe,IAAY,EAChB,IAAIzd,GAAchK,GAAK/I,KAAK0S,MAAM1S,KAAK2S,IAAWX;QAKpD,OAHIhS,KAAKyT,gBAAgB9N,SAAS,KAChC6qB,EAAUnqB,KAAK,IAAI8N,GAAkBpL,GAAK/I,KAAKyT;QAE1C+c;;;;;;;;;;;;;;;;;;;;;;;;;;GAyBX,UAASwW,GAAQpB;IACf,QAAQA;MACN;;cACA;;cACA;QACE;;MACF;MACA;QACE;;MACF;QACE,MAjGChgC;;;;;;;;;;;;;;;;;;;;;;;IA+IL9F,WACWmnC,GACA73B,GACAqB,GACA61B,GACT7yB,GACAd;QALS3S,gBAAAinC,aACA73B,GACApP,kBAAAyQ,GACAzQ,iCAAAsmC;;;mBAML7yB,KACFzT,KAAKknC,MAEPlnC,KAAKyT,kBAAkBA,KAAmB,IAC1CzT,KAAK2S,KAAYA,KAAa;;WAGhCrL;aAAAA;YACE,OAAOtH,KAAKinC,SAAS3/B;;;;QAGvB6+B;aAAAA;YACE,OAAOnmC,KAAKinC,SAASrB;;;;;0EAIvB9lC,iBAAAA,SAAYqnC;QACV,OAAO,IAAIjB,kCACJlmC,KAAKinC,WAAaE,IACvBnnC,KAAKoP,IACLpP,KAAKyQ,YACLzQ,KAAKsmC,2BACLtmC,KAAKyT,iBACLzT,KAAK2S;OAIT7S,iBAAAA,SAAqBwR;eACb81B,kBAAYpnC,KAAKsH,mCAAMqO,MAAMrE,IAC7Bq0B,IAAU3lC,KAAKqnC,GAAY;YAAE//B,MAAM8/B;YAAWf;;QAEpD,OADAV,EAAQ2B,GAAoBh2B,IACrBq0B;OAGT7lC,iBAAAA,SAAyBwR;eACjB81B,kBAAYpnC,KAAKsH,mCAAMqO,MAAMrE,IAC7Bq0B,IAAU3lC,KAAKqnC,GAAY;YAAE//B,MAAM8/B;YAAWf;;QAEpD,OADAV,EAAQuB,MACDvB;OAGT7lC,iBAAAA,SAAqB0G;;;QAGnB,OAAOxG,KAAKqnC,GAAY;YAAE//B;YAAiB++B;;OAG7CvmC,iBAAAA,SAAYy4B;QACV,IAAMgP,KACHvnC,KAAKsH,QAAQtH,KAAKsH,KAAK8B,MACpB,KACA,sBAAoBpJ,KAAKsH,KAAK7F;QACpC,OAAO,IAAI2B,EACThD,EAAKI,kBACL,cAAYR,KAAKinC,SAASb,+CACxB7N,IACAgP;;mFAKNznC,uBAAAA,SAASoR;QACP,kBACElR,KAAK2S,GAAUjG,MAAK4E,SAAAA;YAASJ,OAAAA,EAAUC,EAAWG;0BAClDtR,KAAKyT,gBAAgB/G,MAAK6E,SAAAA;YACxBL,OAAAA,EAAUC,EAAWI,EAAUD;;OAK7BxR,iBAAAA;;;QAGN,IAAKE,KAAKsH,MAGV,KAAK,IAAIb,IAAI,GAAGA,IAAIzG,KAAKsH,KAAK3B,QAAQc,KACpCzG,KAAKsnC,GAAoBtnC,KAAKsH,KAAKxE,IAAI2D;OAInC3G,iBAAAA,SAAoBsG;QAC1B,IAAuB,MAAnBA,EAAQT,QACV,MAAM3F,KAAK6lC,GAAY;QAEzB,IAAImB,GAAQhnC,KAAK4lC,OAAemB,GAAqBn/B,KAAKxB,IACxD,MAAMpG,KAAK6lC,GAAY;;;IAY3B/lC,WACmBsP,GACAk3B,GACjB71B;kBAFiBrB,GACApP,iCAAAsmC,GAGjBtmC,KAAKyQ,aACHA,KAAcjH,GAAgBC,KAAc+9B,GAAcp4B;;;WAI9DtP,iBAAAA,SAAasmC,GAAoB3C;QAC/B,IAAMkC,IAAU3lC,KAAKynC,iBAAkCrB;QACvDsB,GAAoB,uCAAuC/B,GAASlC;QACpE,IAAMkE,IAAaC,GAAYnE,GAAOkC;QAEtC,OAAO,IAAIkC,GACT,IAAI70B,GAAY20B;yBACC,MACjBhC,EAAQlyB;;sEAKZ3T,iBAAAA,SACEsmC,GACA3C,GACA3V;QAEA,IAAM6X,IAAU3lC,KAAKynC,sBAAuCrB;QAC5DsB,GAAoB,uCAAuC/B,GAASlC;QACpE,IAEI9wB,GACAc,GAHEk0B,IAAaC,GAAYnE,GAAOkC;QAKtC,IAAK7X,GAGE;YAG
L,KAFA,IAAMga,IAAmC,WAETha,OAAAA,cAAAA,KAAY;gBAAvC,IAAMia,UACL72B;gBAEJ,IAAI62B,aAA6BC,IAC/B92B,IAAY62B,EAAkB1C,SACzB;oBAAA,IAAiC,mBAAtB0C,GAMhB,MA/SHniC;oBA0SGsL,IAAY+2B,GACV7B,GACA2B;;gBAQJ,KAAKpC,EAAQuC,SAASh3B,IACpB,MAAM,IAAI9N,EACRhD,EAAKI,kBACL,YAAU0Q;gBAITi3B,GAAkBL,GAAqB52B,MAC1C42B,EAAoBzhC,KAAK6K;;YAI7ByB,IAAY,IAAIoD,GAAU+xB,IAC1Br0B,IAAkBkyB,EAAQlyB,gBAAgBhM,QAAO8J,SAAAA;gBAC/CoB,OAAAA,EAAUy1B,GAAO72B,EAAUD;;eAnC7BqB,IAAY,IAAIoD,GAAU4vB,EAAQhzB,KAClCc,IAAkBkyB,EAAQlyB;QAqC5B,OAAO,IAAIo0B,GACT,IAAI70B,GAAY20B,IAChBh1B,GACAc;;oDAKJ3T,iBAAAA,SAAgBsmC,GAAoB3C;QAClC,IAAMkC,IAAU3lC,KAAKynC,oBAAqCrB;QAC1DsB,GAAoB,uCAAuC/B,GAASlC;QAEpE,IAAM4E,IAA8B,IAC9BV,IAAa,IAAIv0B;QACvBjN,EAAQs9B,IAAwB,SAAC16B,GAAKnH;YACpC,IAAM0F,IAAO2gC,GAAgC7B,GAAYr9B,IAEnDu/B,IAAe3C,EAAQ4C,GAAyBjhC;YACtD,IAAI1F,aAAiBkkC;;YAEnBuC,EAAehiC,KAAKiB,SACf;gBACL,IAAMkhC,IAAchC,GAAU5kC,GAAO0mC;gBAClB,QAAfE,MACFH,EAAehiC,KAAKiB,IACpBqgC,EAAWr0B,IAAIhM,GAAMkhC;;;QAK3B,IAAMC,IAAO,IAAI1yB,GAAUsyB;QAC3B,OAAO,IAAIK,GACTf,EAAWn0B,MACXi1B,GACA9C,EAAQlyB;;mEAKZ3T,iBAAAA,SACEsmC,GACA90B,GACA1P,GACA+mC;QAEA,IAAMhD,IAAU3lC,KAAKynC,oBAAqCrB,IACpDv4B,IAAO,EAAC+6B,GAAsBxC,GAAY90B,MAC1ClF,IAAS,EAACxK;QAEhB,IAAI+mC,EAAoBhjC,SAAS,KAAM,GACrC,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,cAAY4lC;QAKhB,KAAK,IAAI3/B,IAAI,GAAGA,IAAIkiC,EAAoBhjC,QAAQc,KAAK,GACnDoH,EAAKxH,KACHuiC,GACExC,GACAuC,EAAoBliC,MAGxB2F,EAAO/F,KAAKsiC,EAAoBliC,IAAI;;;QAQtC,KALA,IAAM4hC,IAA8B,IAC9BV,IAAa,IAAIv0B,IAId3M,IAAIoH,EAAKlI,SAAS,GAAGc,KAAK,KAAKA,GACtC,KAAK0hC,GAAkBE,GAAgBx6B,EAAKpH,KAAK;YAC/C,IAAMa,IAAOuG,EAAKpH,IACZ7E,IAAQwK,EAAO3F,IACf6hC,IAAe3C,EAAQ4C,GAAyBjhC;YACtD,IAAI1F,aAAiBkkC;;YAEnBuC,EAAehiC,KAAKiB,SACf;gBACL,IAAMkhC,IAAchC,GAAU5kC,GAAO0mC;gBAClB,QAAfE,MACFH,EAAehiC,KAAKiB,IACpBqgC,EAAWr0B,IAAIhM,GAAMkhC;;;QAM7B,IAAMC,IAAO,IAAI1yB,GAAUsyB;QAC3B,OAAO,IAAIK,GACTf,EAAWn0B,MACXi1B,GACA9C,EAAQlyB;;kDAKJ3T,iBAAAA,SACN8lC,GACAQ;QAEA,OAAO,IAAIF,GACT;YACEC,IAAAP;YACAQ,YAAAA;YACA9+B,MAAMK,EAAUyN;YAChBixB;WAEFrmC,KAAKoP,IACLpP,KAAKyQ,YACLzQ,KAAKsmC;;;;;;;;;IAWTxmC,iBAAAA,SACEsmC,GACA3C,GACAoF;QAYA,wBAZAA,SAMerC,GAAU/C,GAJTzjC,KAAKynC,GACnBoB,+CACAzC;;;;;;;;;;;;;;;;;SAqBUI,GACd/C,GACAkC;IAEA,IAAImD,GAAoBrF,IAEtB,OADAiE,GAAoB,4BAA4B/B,GAASlC,IAClDmE,GAAYnE,GAAOkC;IACrB,IAAIlC,aAAiBiC;;;;;;;;;;IAO1B,OA2EJ,SACE9jC,GACA+jC;;QAGA,KAAKqB,GAAQrB,EAAQC,KACnB,MAAMD,EAAQE,GACTjkC,EAAM6jC;QAGb,IAAqB,SAAjBE,EAAQr+B,MACV,MAAMq+B,EAAQE,GACTjkC,EAAM6jC;QAIb,IAAMzxB,IAAiBpS,EAAM0nB,GAAiBqc;QAC1C3xB,KACF2xB,EAAQlyB,gBAAgBpN,KAAK2N;KAlBjC,CA5E4ByvB,GAAOkC,IACxB;IAQP;;;IAJIA,EAAQr+B,QACVq+B,EAAQhzB,GAAUtM,KAAKs/B,EAAQr+B,OAG7Bm8B,aAAiBW,OAAO;;;;;;;QAO1B,IACEuB,EAAQsB,SAAS8B,gCACjBpD,EAAQC,IAER,MAAMD,EAAQE,GAAY;QAE5B,OA+BN,SAAoBl8B,GAAkBg8B;YAGpC,KAFA,IAAMv5B,IAAsB,IACxB48B,IAAa,UACGr/B,OAAAA,cAAAA,KAAO;gBAAtB,IACCs/B,IAAczC,SAEhBb,EAAQuD,GAAqBF;gBAEZ,QAAfC;;;gBAGFA,IAAc;oBAAErb,WAAW;oBAE7BxhB,EAAO/F,KAAK4iC,IACZD;;YAEF,OAAO;gBAAE78B,YAAY;oBAAEC,QAAAA;;;SAhBzB,CA/BwBq3B,GAAoBkC;;IAEtC,OA+EN,SACE/jC,GACA+jC;QAEA,IAAc,SAAV/jC,GACF,OAAO;YAAEgsB,WAAW;;QACf,IAAqB,mBAAVhsB,GAChB,OAAO+jC,EAAQl1B,WAAW04B,GAASvnC;QAC9B,IAAqB,oBAAVA,GAChB,OAAO;YAAEuJ,cAAcvJ;;QAClB,IAAqB,mBAAVA,GAChB,OAAO;YAAE2I,aAAa3I;;QACjB,IAAIA,aAAiB4C,MAAM;YAChC,IAAMa,IAAYf,EAAU8kC,SAASxnC;YACrC,OAAO;gBAAEgJ,gBAAgB+6B,EAAQl1B,WAAWuW,EAAY3hB;;;QACnD,IAAIzD,aAAiB0C,GAAW;;;;YAIrC,IAAMe,IAAY,IAAIf,EACpB1C,EAAMwC,SACiC,MAAvCS,KAAKC,MAAMlD,EAAMyC,cAAc;YAEjC,OAAO;gBAAEuG,gBAAgB+6B,EAAQl1B,WAAWuW,EAAY3hB;;;QACnD,IAAIzD,aAAiBynC,IAC1B,OAAO;YACL39B,eAAe;gBACbC,UAAU/J,EAAM+J;gBAChBC,WAAWhK,EAAMgK;;;QAGhB,IAAIhK,aAAiBsjC,IAC1B,OAAO;YAAE35B,YAAYo6B,EAAQl
1B,WAAW0b,GAAQvqB;;QAC3C,IAAIA,aAAiB0nC,IAAmB;YAC7C,IAAMC,IAAS5D,EAAQv2B,IACjBo6B,IAAU5nC,EAAM6nC,UAAUC;YAChC,KAAKF,EAAQhkC,QAAQ+jC,IACnB,MAAM5D,EAAQE,GACZ,wCACK2D,EAAQn6B,kBAAam6B,EAAQl6B,4CAChBi6B,EAAOl6B,kBAAak6B,EAAOj6B;YAGjD,OAAO;gBACL9D,gBAAgBm6B,EAAQl1B,WAAW4W,GACjCzlB,EAAM+nC,GAAKriC,MACX1F,EAAM6nC,UAAUC;;;QAGf,eAAI9nC,KAAuB+jC,EAAQW,2BACxC,OAAO;QAEP,MAAMX,EAAQE,GACZ,8BAA4BjC,GAAiBhiC;KArDnD,CA/E8B6hC,GAAOkC;;;AAKrC,SAASiC,GACP/+B,GACA88B;IAEA,IAAMt7B,IAA0B;IAiBhC,OAfIjB,EAAQP;;;IAGN88B,EAAQr+B,QAAQq+B,EAAQr+B,KAAK3B,SAAS,KACxCggC,EAAQhzB,GAAUtM,KAAKs/B,EAAQr+B,QAGjCnB,EAAQ0C,IAAK,SAACE,GAAaiR;QACzB,IAAMwuB,IAAchC,GAAUxsB,GAAK2rB,EAAQiE,GAAqB7gC;QAC7C,QAAfy/B,MACFn+B,EAAOtB,KAAOy/B;SAKb;QAAEp+B,UAAU;YAAEC,QAAAA;;;;;AAsHvB,SAASy+B,GAAoBrF;IAC3B,SACmB,mBAAVA,KACG,SAAVA,KACEA,aAAiBW,SACjBX,aAAiBj/B,QACjBi/B,aAAiBn/B,KACjBm/B,aAAiB4F,MACjB5F,aAAiByB,MACjBzB,aAAiB6F,MACjB7F,aAAiBiC;;;AAIvB,SAASgC,GACPnmC,GACAokC,GACAlC;IAEA,KAAKqF,GAAoBrF,OAAWK,GAAcL,IAAQ;QACxD,IAAMM,IAAcH,GAAiBH;QACrC,MAAoB,gBAAhBM,IAEI4B,EAAQE,GAAYtkC,IAAU,sBAE9BokC,EAAQE,GAAYtkC,IAAU,MAAMwiC;;;;;;aAQhC6E,GACdxC,GACA9+B;IAEA,IAAIA,aAAgB0gC,IAClB,OAAO1gC,EAAK+9B;IACP,IAAoB,mBAAT/9B,GAChB,OAAO2gC,GAAgC7B,GAAY9+B;IAGnD,MAAM,IAAIlE,EACRhD,EAAKI,kBACL,cAAY4lC;;;;;;;;;aAYT6B,GACP7B,GACA9+B;IAEA;QACE,gBH1oBmCA;YAErC,IADcA,EAAKuiC,OAAOrE,OACb,GACX,MAAM,IAAIpiC,EACRhD,EAAKI,kBACL,yBAAuB8G;YAI3B;gBACE,YAAWK,cAAAA,kBAAaL,EAAKE,MAAM;cACnC,OAAOwnB;gBACP,MAAM,IAAI5rB,EACRhD,EAAKI,kBACL,yBAAuB8G;;UG4nBKA,GAAM+9B;MACpC,OAAOrW;QACP,IAAMztB,KAYYutB,IAZWE,cAaPrtB,QAAQmtB,EAAMvtB,UAAUutB,EAAMrtB;QAZpD,MAAM,IAAI2B,EACRhD,EAAKI,kBACL,cAAY4lC,sCAA0C7kC;;;;;OAS5D,IAAsButB;;;AAKtB,SAASqZ,GAAkB37B,GAAuBC;IAChD,OAAOD,EAAS4D,MAAKzD,SAAAA;QAAKA,OAAAA,EAAEnH,QAAQiH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ICtlBpC3M,WACUk2B,GACR8T,GACQC,GACEC,GACFC,GACEC;kBALFlU,aAEA+T,aACEC,aACFC,GACEjqC,gBAAAkqC,GAnBJlqC;;;;;;QAMRA,UAAqB,GAErBA,UAAmD,MAC3CA,cAA+C,MAYrDA,KAAK24B,KAAU,IAAID,GAAmB1C,GAAO8T;;;;;;;;;WAU/ChqC,iBAAAA;QACE,4BACEE,KAAKgiB,0BACLhiB,KAAKgiB,6BACLhiB,KAAKgiB;;;;;;IAQTliB,iBAAAA;QACE,wBAAOE,KAAKgiB;;;;;;;;;IAUdliB,oBAAAA;0BACME,KAAKgiB,QASThiB,KAAKyC,SARHzC,KAAKmqC;;;;;;;;uBAiBTrqC;;;;;2BACME,KAAKoqC,uBACDpqC,KAAKqqC;;;;;;;;;;;;;;;;;;;IAYfvqC,iBAAAA;QAMEE,KAAKgiB,0BACLhiB,KAAK24B,GAAQtC;;;;;;;;;;;;IAafv2B,iBAAAA;QAAAA;;;gBAGME,KAAKsqC,QAA+B,SAAnBtqC,KAAKuqC,OACxBvqC,KAAKuqC,KAAYvqC,KAAKg2B,GAAMc,GAC1B92B,KAAK+pC,IAvJW,MAyJhB;YAAM/pC,OAAAA,EAAKwqC;;;qDAMP1qC,iBAAAA,SAAYyuB;QACpBvuB,KAAKyqC,MACLzqC,KAAK0qC,OAAQC,KAAKpc;;qGAIZzuB;;;gBACN,OAAIE,KAAKsqC,wBAGAtqC,KAAKqqC;;;;6CAKRvqC,iBAAAA;QACFE,KAAKuqC,OACPvqC,KAAKuqC,GAAUhU,UACfv2B,KAAKuqC,KAAY;;;;;;;;;;;;;;;wBAiBbzqC,SACN8qC,GACA9b;;;;;;;2BASA9uB,KAAKyqC,MACLzqC,KAAK24B,GAAQpC;;;oBAIbv2B,KAAK6qC,wBAEDD;;oBAEF5qC,KAAK24B,GAAQtC,UACJvH,KAASA,EAAMxtB,SAASlB,EAAKU;;oBAEtCoa,GAAS4T,EAAMrtB,aACfyZ,GACE;oBAEFlb,KAAK24B,GAAQmS,QACJhc,KAASA,EAAMxtB,SAASlB,EAAKS;;;oBAGtCb,KAAKiqC,GAAoBc;;oBAIP,SAAhB/qC,KAAK0qC,WACP1qC,KAAKgrC,MACLhrC,KAAK0qC,OAAOL,SACZrqC,KAAK0qC,SAAS;;;oBAKhB1qC,KAAKgiB,QAAQ4oB,mBAGP5qC,KAAKkqC,SAASe,GAAQnc;;;;;;;;;;;;;;IAOpBhvB,iBAAAA,eAiBFA,mBAAAA;QAAAA;QAMNE,KAAKgiB;QAEL,IAAMkpB,IAAsBlrC,KAAKmrC,GAA0BnrC,KAAK6qC,KAG1DA,IAAa7qC,KAAK6qC;;gBAExB7qC,KAAKiqC,GAAoB/mC,WAAWH,MAClCqoC,SAAAA;;;;;YAKMprC,EAAK6qC,OAAeA;;;;YAItB7qC,EAAKqrC,GAAYD;aAGpBtc,SAAAA;YACCoc,GAAoB;gBAClB,IAAMI,IAAW,IAAIloC,EACnBhD,EAAKG,SACL,iCAAiCuuB,EAAMvtB;gBAEzC,OAAOvB,EAAKurC,GAAkBD;;;OAM9BxrC,iBAAAA,SAAYsrC;QAAZtrC,cAMAorC,IAAsBlrC,KAAKmrC,GAA0BnrC,KAAK6qC;QAEhE7qC,KAAK0qC,SAAS1qC,KAAKwrC,GAASJ,IAC5BprC,KAA
K0qC,OAAOe,IAAO;YACjBP,GAAoB;uBAKlBlrC,EAAKgiB,uBACEhiB,EAAKkqC,SAAUuB;;aAG1BzrC,KAAK0qC,OAAOO,IAASnc,SAAAA;YACnBoc,GAAoB;gBACXlrC,OAAAA,EAAKurC,GAAkBzc;;aAGlC9uB,KAAK0qC,OAAOgB,WAAWnd,SAAAA;YACrB2c,GAAoB;gBACXlrC,OAAAA,EAAK0rC,UAAUnd;;;OAKpBzuB,iBAAAA;QAAAA;QAKNE,KAAKgiB,0BAELhiB,KAAK24B,GAAQe,IAAcD;;;2BAMzBz5B,KAAKgiB,0BACLhiB,KAAKme;;;;;;IAMTre,iBAAAA,SAAkBgvB;;;;;QAahB,OARApJ,GAzbY,oBAybM,uBAAqBoJ,IAEvC9uB,KAAK0qC,SAAS,MAMP1qC,KAAKqqC,sBAAmCvb;;;;;;;;IASzChvB,iBAAAA,SACN6rC;QADM7rC;QAGN,OAAQ6G,SAAAA;YACN3G,EAAKg2B,GAAMyC,IAAiB;gBACtBz4B,OAAAA,EAAK6qC,OAAec,IACfhlC,OAEP+e,GAldM,oBAodJ;gBAEK1jB,QAAQC;;;;;IA+BvBnC,WACEk2B,GACAgU,GACA4B,GACQn7B,GACRy5B;QALFpqC;gBAOE0B,IAAAA,aACEw0B,0HAGAgU,GACA4B,GACA1B,yBATMz5B;;;WATgCo7B,SAsBhC/rC,iBAAAA,SACRsrC;QAEA,OAAOprC,KAAKgqC,GAAW8B,GACrB,UACAV;OAIMtrC,wBAAAA,SAAUisC;;QAElB/rC,KAAK24B,GAAQtC;QAEb,IAAMpS,IAAcjkB,KAAKyQ,WAAWu7B,GAAgBD,IAC9CE,IAAWjsC,KAAKyQ,WAAWy7B,GAC/BH;QAEF,OAAO/rC,KAAKkqC,SAAUiC,GAAcloB,GAAagoB;;;;;;;;IASnDnsC,iBAAAA,SAAMqkB;QACJ,IAAMioB,IAAyB;QAC/BA,EAAQ98B,WAAWtP,KAAKyQ,WAAW47B,IACnCD,EAAQE,YAAYtsC,KAAKyQ,WAAWwH,GAASkM;QAE7C,IAAMooB,IAASvsC,KAAKyQ,WAAW+7B,GAAsBroB;QACjDooB,MACFH,EAAQG,SAASA,IAGnBvsC,KAAKysC,GAAYL;;;;;;IAOnBtsC,iBAAAA,SAAQ0a;QACN,IAAM4xB,IAAyB;QAC/BA,EAAQ98B,WAAWtP,KAAKyQ,WAAW47B,IACnCD,EAAQxoB,eAAepJ,GACvBxa,KAAKysC,GAAYL;;EArEuBP;IAmH1C/rC,WACEk2B,GACAgU,GACA4B,GACQn7B,GACRy5B;QALFpqC;gBAOE0B,IAAAA,aACEw0B,sHAGAgU,GACA4B,GACA1B,yBATMz5B;QANVzQ;;;;;;;;;QA2BAA,oBAA8BuJ,EAAWuR;;WAhCA+wB,SAsCzCa;;;;;aAAAA;YACE,OAAO1sC,KAAK2sC;;;;;;IAId7sC,oBAAAA;QACEE,KAAK2sC,SACLnrC,YAAM2c;OAGEre,iBAAAA;QACJE,KAAK2sC,MACP3sC,KAAK4sC,GAAe;OAId9sC,iBAAAA,SACRsrC;QAEA,OAAOprC,KAAKgqC,GAAW8B,GACrB,SACAV;OAIMtrC,wBAAAA,SAAU+sC;QAQlB;;QANAxpC,KACIwpC,EAAc3b,cAGlBlxB,KAAK8sC,kBAAkB9sC,KAAKyQ,WAAW2X,GAAUykB,EAAc3b,cAE1DlxB,KAAK2sC,IAQH;;;;YAIL3sC,KAAK24B,GAAQtC;YAEb,IAAMjF,IAAUpxB,KAAKyQ,WAAWs8B,GAC9BF,EAAcG,cACdH,EAAc7iB,aAEViH,IAAgBjxB,KAAKyQ,WAAWmX,YACpCilB,EAAyB7iB;YAE3B,OAAOhqB,KAAKkqC,SAAU+C,GAAiBhc,GAAeG;;;gBAdtD,OAtqBoB/tB,IAkqBjBwpC,EAAcG,gBAAsD,MAAtCH,EAAcG,aAAarnC,SAG5D3F,KAAK2sC;QACE3sC,KAAKkqC,SAAUgD;;;;;;;IAuB1BptC,iBAAAA;;;QAKE,IAAMssC,IAAwB;QAC9BA,EAAQ98B,WAAWtP,KAAKyQ,WAAW47B,IACnCrsC,KAAKysC,GAAYL;;yEAInBtsC,iBAAAA,SAAe0wB;QAAf1wB,cAWQssC,IAAwB;YAC5Blb,aAAalxB,KAAKyQ,WAAW0b,GAAQnsB,KAAK8sC;YAC1CK,QAAQ3c,EAAU3oB,KAAIohB,SAAAA;gBAAYjpB,OAAAA,EAAKyQ,WAAW28B,GAAWnkB;;;QAG/DjpB,KAAKysC,GAAYL;;EAhIsBP;IC5kBzC/rC,WACkBkqC,GACA4B,GACAn7B;QAHlB3Q;gBAKE0B,IAAAA,2BAJgBwoC,GACAhqC,gBAAA4rC,GACA5rC,eAAAyQ;;+EAMlB3Q,iBAAAA,SAAqButC,GAAiBjB;QAAtCtsC;QACE,OAAOE,KAAK4rC,YACT1oC,WACAH,MAAKqoC,SAAAA;YACGprC,OAAAA,EAAKgqC,GAAWsD,GAAqBD,GAASjB,GAAShB;YAE/DpT,OAAOlJ,SAAAA;YAIN,MAHIA,EAAMxtB,SAASlB,EAAKS,mBACtBb,EAAK4rC,YAAYb,KAEbjc;;;kFAKZhvB,iBAAAA,SACEutC,GACAjB;QAFFtsC;QAIE,OAAOE,KAAK4rC,YACT1oC,WACAH,MAAKqoC,SAAAA;YACGprC,OAAAA,EAAKgqC,GAAWuD,GACrBF,GACAjB,GACAhB;YAGHpT,OAAOlJ,SAAAA;YAIN,MAHIA,EAAMxtB,SAASlB,EAAKS,mBACtBb,EAAK4rC,YAAYb,KAEbjc;;;GApDdhvB;;;IAGEE;;ICkBAF,WAAoB0tC;kBAAAA;;QAlBpBxtC,UAAuBof,MACfpf,iBAAwB,IAChCA;;;;;QAMAA,UAAgD;;;;;;;QAQhDA,UAAwC,IAAIytC;;4BAI5C3tC,SAAa+N;;;;;;oBAGX,IAFA7N,KAAK0tC,MAED1tC,KAAKwwB,UAAU7qB,SAAS,GAC1B,MAAM,IAAIvC,EACRhD,EAAKI,kBACL;2CDuDDi5B,SACL+T,GACA3/B;;;;;;2CAEM8/B,IAAgBxe,GAAUqe,IAC1BI,IAAS;wCACbt+B,UAAUq+B,EAAcl9B,WAAW47B;wCACnCvrB,WAAWjT,EAAKhG,KAAIsU,SAAAA;4CAAKwxB,OAAAA,EAAcl9B,WAAWiX,GAAOvL;;uDAEpCwxB,EAAcJ,GAGnC,qBAAqBK;;;oCAavB,OAhBMC,cAKAttB,IAAO,IAAI5L,KACjBk5B,EAAS1nC,SAAQsO,SAAAA;wCACf,IAAMf,IAAMi6B,EAAcl9B,WAAWq9B,GAAkBr5B;wCACvD8L,EAAKjN,IAAII,EAAI3K,IAAItH,YAAYiS;yCAEzBnF,IAA0
B,sBAChCV,EAAK1H,SAAQ4C,SAAAA;wCACX,IAAM2K,IAAM6M,EAAKzd,IAAIiG,EAAItH;wCAvGxB4B,KAwGYqQ,IACbnF,EAAOlI,KAAKqN;yCAEPnF;;;;qBAzBFkrB,CCpD2Cz5B,KAAKwtC,IAAW3/B;;;oBAQ9D,0BARM0S,cACDpa,SAAQuN,SAAAA;wBACPA,aAAeY,MAAcZ,aAAe3B,KAC9C/R,EAAK+tC,GAAcr6B,KAEnB9N;yBAGG2a;;;;OAGTzgB,kBAAAA,SAAIiJ,GAAkB2J;QACpB1S,KAAKguC,MAAMt7B,EAAKu7B,GAAYllC,GAAK/I,KAAKgS,GAAajJ,MACnD/I,KAAKkuC,GAAY3vB,IAAIxV;OAGvBjJ,qBAAAA,SAAOiJ,GAAkB2J;QACvB;YACE1S,KAAKguC,MAAMt7B,EAAKu7B,GAAYllC,GAAK/I,KAAKmuC,GAAsBplC;UAC5D,OAAOimB;YACPhvB,KAAKouC,KAAiBpf;;QAExBhvB,KAAKkuC,GAAY3vB,IAAIxV;OAGvBjJ,qBAAAA,SAAOiJ;QACL/I,KAAKguC,MAAM,EAAC,IAAIz5B,GAAexL,GAAK/I,KAAKgS,GAAajJ,QACtD/I,KAAKkuC,GAAY3vB,IAAIxV;4BAGvBjJ;;;;;;oBAGE,IAFAE,KAAK0tC,MAED1tC,KAAKouC,IACP,MAAMpuC,KAAKouC;2BAETC,IAAYruC,KAAKsuC;;oBAErBtuC,KAAKwwB,UAAUrqB,SAAQ8iB,SAAAA;wBACrBolB,IAAYA,EAAU1yB,OAAOsN,EAASlgB;;;;oBAIxCslC,EAAUloC,SAAQ,SAAC4C,GAAKwlC;wBACtBvuC,EAAKwwB,UAAUnqB,KAAK,IAAImO,GAAezL,GAAK/I,EAAKgS,GAAajJ;yCDX7D0wB,SACL+T,GACAhd;;;;;;2CAEMmd,IAAgBxe,GAAUqe,IAC1BI,IAAS;wCACbt+B,UAAUq+B,EAAcl9B,WAAW47B;wCACnCc,QAAQ3c,EAAU3oB,KAAIipB,SAAAA;4CAAK6c,OAAAA,EAAcl9B,WAAW28B,GAAWtc;;uDAE1C6c,EAAcL,GAGnC,UAAUM;;;oCACZ,OAJMC,+BAICF,EAAcl9B,WAAWs8B,GAC9Bc,EAASb,cACTa,EAAS7jB;;;;qBAfNyP,CCamBz5B,KAAKwtC,IAAWxtC,KAAKwwB;;;;qCAC3CxwB,KAAKwuC;;;;OAGC1uC,iBAAAA,SAAc4T;QACpB,IAAI+6B;QAEJ,IAAI/6B,aAAe3B,IACjB08B,IAAa/6B,EAAIlC,cACZ;YAAA,MAAIkC,aAAeY,KAIxB,MAxGJ1O;;wBAsGI6oC,IAAanpC,EAAgByB;;QAK/B,IAAM2nC,IAAkB1uC,KAAKsuC,GAAaxrC,IAAI4Q,EAAI3K;QAClD,IAAwB,SAApB2lC;YACF,KAAKD,EAAWjpC,QAAQkpC;;YAEtB,MAAM,IAAItrC,EACRhD,EAAKY,SACL;eAIJhB,KAAKsuC,KAAetuC,KAAKsuC,GAAa9yB,GAAO9H,EAAI3K,KAAK0lC;;;;;;IAQlD3uC,iBAAAA,SAAaiJ;QACnB,IAAMyI,IAAUxR,KAAKsuC,GAAaxrC,IAAIiG;QACtC,QAAK/I,KAAKkuC,GAAY5vB,IAAIvV,MAAQyI,IACzBI,GAAaF,WAAWF,KAExBI,GAAagY;;;;;IAOhB9pB,iBAAAA,SAAsBiJ;QAC5B,IAAMyI,IAAUxR,KAAKsuC,GAAaxrC,IAAIiG;;;gBAGtC,KAAK/I,KAAKkuC,GAAY5vB,IAAIvV,MAAQyI,GAAS;YACzC,IAAIA,EAAQhM,QAAQF,EAAgByB;;;;;;;;;;YAYlC,MAAM,IAAI3D,EACRhD,EAAKI,kBACL;;wBAIJ,OAAOoR,GAAaF,WAAWF;;;;gBAI/B,OAAOI,GAAaD;OAIhB7R,oBAAAA,SAAM0wB;QACZxwB,KAAK0tC,MACL1tC,KAAKwwB,YAAYxwB,KAAKwwB,UAAU1Y,OAAO0Y;OAGjC1wB,iBAAAA;;IC9HRA,WACU43B,GACAiX;kBADAjX,aACAiX;;QAzBF3uC;;;;;;QAORA,UAA8B;;;;;;QAO9BA,UAA0D;;;;;;QAO1DA;;;;;;;;;WAcAF,iBAAAA;QAAAA;QACmC,MAA7BE,KAAK4uC,OACP5uC,KAAK6uC,6BAML7uC,KAAK8uC,KAAmB9uC,KAAK03B,GAAWZ,qDA1Dd,MA6DxB;mBACE92B,EAAK8uC,KAAmB,MAKxB9uC,EAAK+uC,GACH,8CAGF/uC,EAAK6uC;YAME7sC,QAAQC;;;;;;;;;IAYvBnC,iBAAAA,SAAyBgvB;kCACnB9uB,KAAKgiB,QACPhiB,KAAK6uC,+BAaL7uC,KAAK4uC;QACD5uC,KAAK4uC,MA/GmB,MAgH1B5uC,KAAKgvC,MAELhvC,KAAK+uC,GACH,mDAC+BjgB,EAAMrtB;QAGvCzB,KAAK6uC;;;;;;;;;IAYX/uC,kBAAAA,SAAImvC;QACFjvC,KAAKgvC,MACLhvC,KAAK4uC,KAAsB,6BAEvBK;;;QAGFjvC,KAAKkvC,UAGPlvC,KAAK6uC,GAAgBI;OAGfnvC,iBAAAA,SAAgBmvC;QAClBA,MAAajvC,KAAKgiB,UACpBhiB,KAAKgiB,QAAQitB,GACbjvC,KAAK2uC,GAAmBM;OAIpBnvC,iBAAAA,SAAmCqvC;QACzC,IAAM5tC,IACJ,8CAA4C4tC;QAI1CnvC,KAAKkvC,MACPh0B,GAAS3Z,IACTvB,KAAKkvC,WAELxpB,GAxKU,sBAwKQnkB;OAIdzB,iBAAAA;QACwB,SAA1BE,KAAK8uC,OACP9uC,KAAK8uC,GAAiBvY,UACtBv2B,KAAK8uC,KAAmB;;;IC/D5BhvC;;;;IAIUsvC;;IAEA5B,GACA9V,GACRiX,GACAU;QATFvvC;kBAIUsvC,aAEA5B,aACA9V;;;;;;;;;;;;;;;;;;QA1CV13B,UAAyC;;;;;;;;;;QAWzCA,UAAwB,IAAI2U,KAK5B3U,UAA8D;;;;;QAMtDA,0BAEAA;;;;;;QAORA,cAeEA,KAAKqvC,KAAsBA,GAC3BrvC,KAAKqvC,GAAoBC,IAAa9oB,SAAAA;YACpCkR,EAAWe,IAAiBgB;;;;;mCACtBz5B,KAAKuvC,QACP7pB,GA5FM,eA8FJ;4CAEI1lB,KAAKwvC;;;;;;;;;;;aAKjBxvC,KAAKyvC,KAAqB,IAAIC,GAC5BhY,GACAiX;;QAIF3uC,KAAK2vC,cH8BPnC,GACAxX,GACAkU;YAEA,IAAMyD,IAAgBxe,GAAUqe;YAChC,OAAO,IAAIoC,GACT5Z,GACA2X,EAAc3D,IACd2D,EAAc/B,aACd+B,EAAcl9B,YACdy5B;UGxC4ClqC,KAAKwtC,IAAW
9V,GAAY;YACtEmY,IAAQ7vC,KAAK8vC,GAAkB/X,KAAK/3B;YACpC+vC,IAAS/vC,KAAKgwC,GAAmBjY,KAAK/3B;YACtCiwC,IAAejwC,KAAKkwC,GAAoBnY,KAAK/3B;YAG/CA,KAAKmwC,cHSP3C,GACAxX,GACAkU;YAEA,IAAMyD,IAAgBxe,GAAUqe;YAChC,OAAO,IAAI4C,GACTpa,GACA2X,EAAc3D,IACd2D,EAAc/B,aACd+B,EAAcl9B,YACdy5B;UGnB4ClqC,KAAKwtC,IAAW9V,GAAY;YACtEmY,IAAQ7vC,KAAKqwC,GAAkBtY,KAAK/3B;YACpC+vC,IAAS/vC,KAAKswC,GAAmBvY,KAAK/3B;YACtCuwC,IAAqBvwC,KAAKwwC,GAAyBzY,KAAK/3B;YACxDywC,IAAkBzwC,KAAKitC,GAAiBlV,KAAK/3B;;;;;;kBAcjDF,oBAAAA;QACE,OAAOE,KAAK0wC;;+CAId5wC,4BAAAA;QAEE,OADAE,KAAK2wC,qBACE3wC,KAAK4wC;wBAGN9wC;;;;;;2BACFE,KAAKuvC,QACPvvC,IAAAA,KAAKmwC,oBAAoCnwC,KAAKovC,GAAWnR;;;;2BAAzDj+B,EAAiB8sC,4BAEb9sC,KAAK6wC,OACP7wC,KAAK8wC,OAEL9wC,KAAKyvC,GAAmBn8B;oCAIpBtT,KAAK+wC;;;;;;;;;;;;;;;;iCAQfjxC;;;;;2BACEE,KAAK2wC,qCACC3wC,KAAKgxC;;;;;oBAGXhxC,KAAKyvC,GAAmBn8B;;;;wBAGlBxT;;;;;2CACAE,KAAKmwC,GAAYc;;;qDACjBjxC,KAAK2vC,GAAYsB;;;qCAEnBjxC,KAAKkxC,GAAcvrC,SAAS,MAC9B+f,GA7KU,eA+KR,gCAA8B1lB,KAAKkxC,GAAcvrC;oBAEnD3F,KAAKkxC,KAAgB,KAGvBlxC,KAAKmxC;;;;wBAGPrxC;;;;;2BACE4lB,GAxLY,eAwLM,+BAClB1lB,KAAK2wC;oCACC3wC,KAAKgxC;;;qCACXhxC,KAAKqvC,GAAoB+B;;;oBAIzBpxC,KAAKyvC,GAAmBn8B;;;;;;;;;IAO1BxT,qBAAAA,SAAOqkB;QACDnkB,KAAKqxC,GAAc/yB,IAAI6F,EAAW3J;;QAKtCxa,KAAKqxC,GAAc/9B,IAAI6Q,EAAW3J,UAAU2J,IAExCnkB,KAAK6wC;;QAEP7wC,KAAK8wC,OACI9wC,KAAK2vC,GAAYrF,QAC1BtqC,KAAKsxC,GAAiBntB;;;;;;IAQ1BrkB,iBAAAA,SAAS0a;QAMPxa,KAAKqxC,GAAc99B,OAAOiH,IACtBxa,KAAK2vC,GAAYrF,QACnBtqC,KAAKuxC,GAAmB/2B,IAGM,MAA5Bxa,KAAKqxC,GAAc9qC,SACjBvG,KAAK2vC,GAAYrF,OACnBtqC,KAAK2vC,GAAY6B,OACRxxC,KAAKuvC;;;;QAIdvvC,KAAKyvC,GAAmBn8B;;iEAM9BxT,iBAAAA,SAAuB0a;QACrB,OAAOxa,KAAKqxC,GAAcvuC,IAAI0X,MAAa;;iEAI7C1a,iBAAAA,SAAuB0a;QACrB,OAAOxa,KAAKyxC,GAAWpsB,GAAuB7K;;;;;;IAOxC1a,iBAAAA,SAAiBqkB;QACvBnkB,KAAK0xC,GAAuBpsB,GAA2BnB,EAAW3J,WAClExa,KAAK2vC,GAAYgC,GAAMxtB;;;;;;;IAQjBrkB,iBAAAA,SAAmB0a;QACzBxa,KAAK0xC,GAAuBpsB,GAA2B9K,IACvDxa,KAAK2vC,GAAYiC,GAAQp3B;OAGnB1a,iBAAAA;QAMNE,KAAK0xC,KAAwB,IAAIG,GAAsB7xC,OACvDA,KAAK2vC,GAAYxxB,SACjBne,KAAKyvC,GAAmBqC;;;;;;IAOlBhyC,iBAAAA;QACN,OACEE,KAAKuvC,SACJvvC,KAAK2vC,GAAYvF,QAClBpqC,KAAKqxC,GAAc9qC,OAAO;OAI9BzG,iBAAAA;QACE,QAAQE,KAAK+xC,MAAmB/xC,KAAKgyC,aAAahyC,KAAK2wC;OAGjD7wC,iBAAAA;QACNE,KAAK0xC,KAAwB;wBAGvB5xC;;;;uBACNE,KAAKqxC,GAAclrC,SAAQ,SAACge,GAAY3J;oBACtCxa,EAAKsxC,GAAiBntB;;;;wBAIlBrkB,SAAyBgvB;;;uBAU/B9uB,KAAKmxC;;gBAGDnxC,KAAK6wC,QACP7wC,KAAKyvC,GAAmBwC,OAExBjyC,KAAK8wC;;;;gBAKL9wC,KAAKyvC,GAAmBn8B;;;wBAIpBxT,SACNmkB,GACAtJ;;;;;;wBAGA3a,KAAKyvC,GAAmBn8B,8BAGtB2Q,aAAuBsE,0BACvBtE,EAAYjC,SACZiC,EAAY/B;;oBALdliB;;;;uEAUUA,KAAKkyC,GAAkBjuB;;;;;;yCAE7ByB,GA7VQ,eA+VN,oCACAzB,EAAYhC,UAAU7a,KAAK,MAC3B4nB;oCAEIhvB,KAAKmyC,GAA4BnjB;;;;;;;;;wBAKvC/K,aAAuByE,KACzB1oB,KAAK0xC,GAAuBU,GAAqBnuB,KACxCA,aAAuB+E,KAChChpB,KAAK0xC,GAAuBW,GAAsBpuB,KAMlDjkB,KAAK0xC,GAAuBY,GAAmBruB;oBAG5CtJ,EAAgBnV,QAAQF,EAAgByB,QAZzCkd;;;;yEAcwCjkB,KAAKovC,GAAWjR;;;2BAAlDsB,cACF9kB,EAAgBxN,EAAUsyB,MAA8B,oBAGpDz/B,KAAKuyC,GAAmB53B;;;;;;;;;;;;;2BAGhC+K,GA7XQ,eA6XU,4DACZ1lB,KAAKmyC,GAA4BnjB;;;;;;;;;;;;;;;;qBAUrClvB,SAAkCkvB;;;;;;oBACxC,KAAIyI,GAA4BzI,IAsB9B,MAAMA;;2BAjBNhvB,KAAK+xC,yBAGC/xC,KAAKgxC;;;;qCACXhxC,KAAKyvC,GAAmBn8B;;oBAGxBtT,KAAK03B,GAAW8a,IAAiB/Y;;;;;;;;2CAC/B/T,GAtZQ,eAsZU,8CAIZ1lB,KAAKovC,GAAWjR;;;;;;qDACtBn+B,KAAK+xC,yBACC/xC,KAAK4wC;;;;;;;;;;;;;;;;;IAYT9wC,iBAAAA,SAAmB6a;QAAnB7a,cAKAilB,IAAc/kB,KAAK0xC,GAAuBe,GAC9C93B;;;;QAuDF,OAlDAoK,EAAY7D,GAAc/a,SAAQ,SAAC8Z,GAAQzF;YACzC,IAAIyF,EAAOpF,YAAY8H,OAAwB,GAAG;gBAChD,IAAMwB,IAAankB,EAAKqxC,GAAcvuC,IAAI0X;;gCAEtC2J,KACFnkB,EAAKqxC,GAAc/9B,IACjBkH,GACA2J,EAAWwa,GAAgB1e,EAAOpF,aAAaF;;;;;QAQvDoK,EAAY5D,GAAiBhb,SAAQqU,SAAAA;YACnC,IAAM2
J,IAAankB,EAAKqxC,GAAcvuC,IAAI0X;YAC1C,IAAK2J,GAAL;;;gBAOAnkB,EAAKqxC,GAAc/9B,IACjBkH,GACA2J,EAAWwa,GACTp1B,EAAWuR,IACXqJ,EAAWxJ;;;gBAMf3a,EAAKuxC,GAAmB/2B;;;;;gBAMxB,IAAMk4B,IAAoB,IAAI33B,GAC5BoJ,EAAW5J,QACXC,qCAEA2J,EAAWzJ;gBAEb1a,EAAKsxC,GAAiBoB;;aAIjB1yC,KAAKyxC,GAAWkB,GAAiB5tB;;yDAIlCjlB,SACNmkB;;;;;;oBAGM6K,IAAQ7K,EAAkB/B,cACT+B,IAAAA,EAAYhC;;;2BAAZgC,gBAAlB2uB,UAEC5yC,KAAKqxC,GAAc/yB,IAAI9D,qBACnBxa,KAAKyxC,GAAWoB,GAAar4B,GAAUsU;;;8BAC7C9uB,KAAKqxC,GAAc99B,OAAOiH,IAC1Bxa,KAAK0xC,GAAuB9tB,aAAapJ;;;;;;2BALtByJ;;;;;;;;;;;;;;;;qBAkBzBnkB;;;;;;2BACME,KAAK8yC,QACDC,IACJ/yC,KAAKkxC,GAAcvrC,SAAS,IACxB3F,KAAKkxC,GAAclxC,KAAKkxC,GAAcvrC,SAAS,GAAG2qB,WzB5hB/B;oCyB8hBLtwB,KAAKovC,GAAW4D,GAClCD;;;2BAGY,UAJR/hB,uCAK8B,MAA9BhxB,KAAKkxC,GAAcvrC,UACrB3F,KAAKmwC,GAAYqB;;;;2BAGnBxxC,KAAKizC,GAAmBjiB,oBAClBhxB,KAAK+wC;;;;;;;;;2BAIX/wC,KAAKkzC,QACPlzC,KAAKmzC;;;;;;;;;IAQDrzC,iBAAAA;QACN,OACEE,KAAKuvC,QAAmBvvC,KAAKkxC,GAAcvrC,SA5hBtB;;;IAiiBzB7F,iBAAAA;QACE,OAAOE,KAAKkxC,GAAcvrC;;;;;;IAOpB7F,iBAAAA,SAAmBkxB;QAKzBhxB,KAAKkxC,GAAc7qC,KAAK2qB,IAEpBhxB,KAAKmwC,GAAY7F,QAAYtqC,KAAKmwC,GAAYiD,MAChDpzC,KAAKmwC,GAAYvD,GAAe5b,EAAMR;OAIlC1wB,iBAAAA;QACN,OACEE,KAAKuvC,SACJvvC,KAAKmwC,GAAY/F,QAClBpqC,KAAKkxC,GAAcvrC,SAAS;OAIxB7F,iBAAAA;QAKNE,KAAKmwC,GAAYhyB;wBAGXre;;;uBACNE,KAAKmwC,GAAYkD;;;OAGXvzC,iBAAAA;QAAAA;;gBAEN,OAAOE,KAAKovC,GACTlR,GAAmBl+B,KAAKmwC,GAAYrD,iBACpC/pC,MAAK;;YAEJ,KAAoB/C,WAAAA,IAAAA,EAAKkxC,IAALlxC,cAAAA;gBAAf,IAAMgxB;gBACThxB,EAAKmwC,GAAYvD,GAAe5b,EAAMR;;YAGzCwH,MAAMuJ;OAGHzhC,iBAAAA,SACNmxB,GACAG;QAFMtxB,cAUAkxB,IAAQhxB,KAAKkxC,GAAcoC,SAC3BC,IAAUjiB,GAAoBrG,KAClC+F,GACAC,GACAG,GACApxB,KAAKmwC,GAAYrD;QAEnB,OAAO9sC,KAAKyxC,GAAW+B,GAAqBD,GAASxwC,MAAK;YAGjD/C,OAAAA,EAAK+wC;;wBAIRjxC,SAAyBgvB;;;;;2BAY3BA,KAAS9uB,KAAKkxC,GAAcvrC,SAAS,IACnC3F,KAAKmwC,GAAYiD,qBAEbpzC,KAAKyzC;;;;;;;;;;2CAKLzzC,KAAK0zC;;;;;;;;;;;oBAKT1zC,KAAKkzC,QACPlzC,KAAKmzC;;;;;;;wBAMHrzC,SAA2BgvB;;;;;;gBAIjC,OAAI9T,GAAiB8T,EAAMxtB,0BACzBokB,GAtpBU,eAwpBR,0EACA1lB,KAAKmwC,GAAYrD;gBAEnB9sC,KAAKmwC,GAAYrD,kBAAkBvjC,EAAWuR,IAEvC9a,KAAKovC,GACTlR,GAAmB30B,EAAWuR,IAC9Bkd,MAAMuJ;;;wBAOLzhC,SAAuBgvB;;;;;;gBAG7B,O7C7nBK9T,GAD6B1Z,I6C8nBRwtB,EAAMxtB,S7C7nBDA,MAASlB,EAAKY,W6CgoBrCgwB,IAAQhxB,KAAKkxC,GAAcoC,2BAKjCtzC,KAAKmwC,GAAYwD;gBAEV3zC,KAAKyxC,GACTmC,GAAkB5iB,EAAMV,SAASxB,GACjC/rB,MAAK;oBAGG/C,OAAAA,EAAK+wC;;;;OAOpBjxC,iBAAAA;QACE,OAAO,IAAI+zC,GAAY7zC,KAAKwtC;wBAGtB1tC;;;;;2BACNE,KAAK2wC,qCACC3wC,KAAKgxC;;;qCACXhxC,KAAKyvC,GAAmBn8B,8CAClBtT,KAAK0wC;;;;;;;wBAGb5wC;;;;;2BACME,KAAKuvC;;;;oBAIP7pB,GA/sBU,eA+sBQ,sEACZ1lB,KAAKwvC;;;;;;;;;;;;;;qBAOf1vC,SAAwBkyC;;;;;;2BACtBhyC,KAAKgyC,YAAYA,GAEbA,KAAahyC,KAAK2wC,iCACd3wC,KAAK0wC;;;;;;4BACDsB,IAAAA,2CACJhyC,KAAKgxC;;;kCACXhxC,KAAKyvC,GAAmBn8B;;;;;;;;;;;;ICjW9BxT;QACEE,uBAAkBwf;;WAElB1f,iBAAAA,SAAe0a;QACbxa,KAAK8zC,kBAAkB9zC,KAAK8zC,gBAAgBv1B,IAAI/D;OAGlD1a,iBAAAA,SAAkB0a;QAChBxa,KAAK8zC,kBAAkB9zC,KAAK8zC,gBAAgBvgC,OAAOiH;;;;;;IAOrD1a,iBAAAA;QACE,IAAM4S,IAA0B;YAC9BohC,iBAAiB9zC,KAAK8zC,gBAAgB3sC;YACtC4sC,cAAcvvC,KAAKC;;QAErB,OAAOy/B,KAAKC,UAAUzxB;;;IAwlB1B5S;QACEE,UAAqB,IAAIg0C,IACzBh0C,UAA+D,IAE/DA,UAA6C,MAC7CA,UAAkE,MAClEA,UAEW;;WAEXF,iBAAAA,SAAmBwwB;;OAInBxwB,iBAAAA,SACEwwB,GACAtO,GACA8M;;OAKFhvB,iBAAAA,SAAoB0a;QAElB,OADAxa,KAAKi0C,GAAWC,GAAe15B,IACxBxa,KAAKm0C,GAAW35B,MAAa;OAGtC1a,iBAAAA,SACE0a,GACAwH,GACA8M;QAEA9uB,KAAKm0C,GAAW35B,KAAYwH;OAG9BliB,iBAAAA,SAAuB0a;QACrBxa,KAAKi0C,GAAWG,GAAkB55B;OAGpC1a,iBAAAA,SAAmB0a;QACjB,OAAOxa,KAAKi0C,GAAWH,gBAAgBx1B,IAAI9D;OAG7C1a,iBAAAA,SAAgB0a;eACPxa,KAAKm0C,GAAW35B;OAGzB1a,iBAAAA;QACE,OAAOE,KAAKi0C,GAAWH;OAGzBh0C,iBAAAA,SAAoB0a;QAClB,OAAOxa,KAAKi0C,GAAWH,gBAA
gBx1B,IAAI9D;OAG7C1a,oBAAAA;QAEE,OADAE,KAAKi0C,KAAa,IAAID,IACfhyC,QAAQC;OAGjBnC,iBAAAA,SACE+B,GACA46B,GACAC;;OAKF58B,iBAAAA,SAAeu0C;;OAIfv0C,iBAAAA,eAEAA,iBAAAA,SAAoB4a;UCrkCpB5a,SAAmBiJ;IAAA/I,WAAA+I;QAGnBjJ,SAAmBiJ;IAAA/I,WAAA+I;;IA4CnBjJ,WACUwgB;;IAEAg0B;QAFAt0C,aAAAsgB,aAEAg0B,GAjBVt0C,UAAsC;;;;;;;QAOtCA;;QAGAA,UAAyBsf;;QAEzBtf,UAAsBsf,MAOpBtf,KAAKu0C,KAAc,IAAI30B,GAAYU,EAAMk0B,GAAczc,KAAKzX;;WAO9Dm0B;;;;;aAAAA;YACE,OAAOz0C,KAAKs0C;;;;;;;;;;;;;;;IAadx0C,iBAAAA,SACE2gB,GACAi0B;QAFF50C,cAIQ60C,IAAYD,IACdA,EAAgBC,KAChB,IAAIC,IACFC,IAAiBH,IACnBA,EAAgBH,KAChBv0C,KAAKu0C,IACLO,IAAiBJ,IACjBA,EAAgBh0B,KAChB1gB,KAAK0gB,IACLq0B,IAAiBF,GACjBG,QAWEC,IACJj1C,KAAKsgB,MAAM40B,QAAqBL,EAAetuC,SAASvG,KAAKsgB,MAAMpa,QAC/D2uC,EAAeM,SACf,MACAC,IACJp1C,KAAKsgB,MAAM+0B,QAAoBR,EAAetuC,SAASvG,KAAKsgB,MAAMpa,QAC9D2uC,EAAermC,UACf;;QAwFN,IAtFAiS,EAAWvE,IACT,SAACnT,GAAkBusC;YACjB,IAAMC,IAASV,EAAe/xC,IAAIiG,IAC9B+Y,IAASwzB,aAAuBvjC,KAAWujC,IAAc;YACzDxzB,MAQFA,IAAS9hB,EAAKsgB,MAAM9G,QAAQsI,KAAUA,IAAS;YAGjD,IAAM0zB,MAA4BD,KAC9Bv1C,EAAK0gB,GAAYpC,IAAIi3B,EAAOxsC,MAE1B0sC,MAA4B3zB,MAC9BA,EAAO3L;;;YAGNnW,EAAK0gB,GAAYpC,IAAIwD,EAAO/Y,QAAQ+Y,EAAO1P,wBAG5CsjC;;YAGAH,KAAUzzB,IACMyzB,EAAO7iC,OAAOlN,QAAQsc,EAAOpP,UAqBpC8iC,MAA8BC,MACvCd,EAAUgB,MAAM;gBAAEv1B;gBAA2B1M,KAAKoO;gBAClD4zB,UArBK11C,EAAK41C,GAA4BL,GAAQzzB,OAC5C6yB,EAAUgB,MAAM;gBACdv1B;gBACA1M,KAAKoO;gBAEP4zB,SAGGT,KACCj1C,EAAKsgB,MAAMk0B,GAAc1yB,GAAQmzB,KAAkB,KACpDG,KACCp1C,EAAKsgB,MAAMk0B,GAAc1yB,GAAQszB,KAAmB;;;;YAKtDJ,YAOIO,KAAUzzB,KACpB6yB,EAAUgB,MAAM;gBAAEv1B;gBAAwB1M,KAAKoO;gBAC/C4zB,UACSH,MAAWzzB,MACpB6yB,EAAUgB,MAAM;gBAAEv1B;gBAA0B1M,KAAK6hC;gBACjDG,SAEIT,KAAkBG;;;;YAIpBJ,UAIAU,MACE5zB,KACFizB,IAAiBA,EAAex2B,IAAIuD,IAElCgzB,IADEW,IACeX,EAAev2B,IAAIxV,KAEnB+rC,EAAevhC,OAAOxK,OAGzCgsC,IAAiBA,EAAexhC,OAAOxK;YACvC+rC,IAAiBA,EAAevhC,OAAOxK;aAO3C/I,KAAKsgB,MAAM40B,QAAqBl1C,KAAKsgB,MAAM+0B,MAC7C,MAAON,EAAexuC,OAAOvG,KAAKsgB,MAAYpa,SAAE;YAC9C,IAAMqvC,IAASv1C,KAAKsgB,MAAM40B,OACtBH,EAAeI,SACfJ,EAAevmC;YACnBumC,IAAiBA,EAAexhC,OAAOgiC,EAAQxsC,MAC/C+rC,IAAiBA,EAAevhC,OAAOgiC,EAAQxsC,MAC/C4rC,EAAUgB,MAAM;gBAAEv1B;gBAA0B1M;;;QAQhD,OAAO;YACLmiC,IAAad;YACbe,IAAAnB;YACAoB,IAAAf;YACAgB,IAAalB;;OAITh1C,iBAAAA,SACNy1C,GACAzzB;;;;;;;;QASA,OACEyzB,EAAOp/B,MACP2L,EAAO1P,0BACN0P,EAAO3L;;;;;;;;;;;;;IAeZrW,iBAAAA,SACE2gB,GACAw1B,GACA9yB;QAHFrjB,cASQ0gB,IAAUxgB,KAAKu0C;QACrBv0C,KAAKu0C,KAAc9zB,EAAW8zB,IAC9Bv0C,KAAK0gB,KAAcD,EAAWC;;QAE9B,IAAML,IAAUI,EAAWk0B,GAAUuB;QACrC71B,EAAQtS,MAAK,SAACooC,GAAIC;YAsLtB,OAAA,SAA2BD,GAAgBC;gBACzC,IAAMxpB,IAAS3M,SAAAA;oBACb,QAAQA;sBACN;wBACE,OAAO;;sBACT;sBAEA;;;;wBAIE,OAAO;;sBACT;wBACE,OAAO;;sBACT;wBACE,OAtdYra;;;gBA0dlB,OAAOgnB,EAAMupB,KAAMvpB,EAAMwpB;aAnB3B,CApL0BD,EAAG/1B,MAAMg2B,EAAGh2B,SAC9BpgB,EAAKsgB,MAAMk0B,GAAc2B,EAAGziC,KAAK0iC,EAAG1iC;aAIxC1T,KAAKq2C,GAAkBlzB;QACvB,IAAMmzB,IAAeL,IACjBj2C,KAAKi2C,OACL,IAEEM,IADsC,MAA7Bv2C,KAAKw2C,GAAejwC,QAAcvG,KAAKiI,sCAEhD2Y,IAAmB21B,MAAiBv2C,KAAKy2C;QAG/C,OAFAz2C,KAAKy2C,KAAYF,GAEM,MAAnBl2B,EAAQ1a,UAAiBib,IAcpB;YACLqrB,UAXyB,IAAIlrB,GAC7B/gB,KAAKsgB,OACLG,EAAW8zB,IACX/zB,GACAH,GACAI,EAAWC,sBACX61B,GACA31B;;YAKA81B,IAAAJ;YAdK;YAAEI,IAAAJ;;;;;;;;IAuBbx2C,iBAAAA,SAAuBu0C;QACrB,OAAIr0C,KAAKiI,kCAAWosC;;;;;QAKlBr0C,KAAKiI,SACEjI,KAAK22C,GACV;YACEd,IAAa71C,KAAKu0C;YAClBuB,IAAW,IAAIlB;YACfoB,IAAah2C,KAAK0gB;YAClBq1B;;2CAMG;YAAEW,IAAc;;;;;;IAOnB52C,iBAAAA,SAAgBiJ;;QAEtB,QAAI/I,KAAKs0C,GAAiBh2B,IAAIvV;;UAIzB/I,KAAKu0C,GAAYj2B,IAAIvV,OAOtB/I,KAAKu0C,GAAYzxC,IAAIiG,GAAMoN;;;;;;IAWzBrW,iBAAAA,SAAkBqjB;QAAlBrjB;QACFqjB,MACFA,EAAa1B,GAAetb,SAC1B4C,SAAAA;YAAQ/I,OAAAA,EAAKs0C,KAAmBt0C,EAAKs0C,GAAiB/1B,IAAIxV;aAE5Doa,EAAazB
,GAAkBvb,SAAQ4C,SAAAA,SAMvCoa,EAAaxB,GAAiBxb,SAC5B4C,SAAAA;YAAQ/I,OAAAA,EAAKs0C,KAAmBt0C,EAAKs0C,GAAiB/gC,OAAOxK;aAE/D/I,KAAKiI,KAAUkb,EAAalb;OAIxBnI,iBAAAA;QAAAA;;gBAEN,KAAKE,KAAKiI,IACR,OAAO;;;gBAKT,IAAM2uC,IAAoB52C,KAAKw2C;QAC/Bx2C,KAAKw2C,KAAiBl3B,MACtBtf,KAAKu0C,GAAYpuC,SAAQuN,SAAAA;YACnB1T,EAAK62C,GAAgBnjC,EAAI3K,SAC3B/I,EAAKw2C,KAAiBx2C,EAAKw2C,GAAej4B,IAAI7K,EAAI3K;;;QAKtD,IAAMsX,IAAiC;QAWvC,OAVAu2B,EAAkBzwC,SAAQ4C,SAAAA;YACnB/I,EAAKw2C,GAAel4B,IAAIvV,MAC3BsX,EAAQha,KAAK,IAAIywC,GAAqB/tC;aAG1C/I,KAAKw2C,GAAerwC,SAAQ4C,SAAAA;YACrB6tC,EAAkBt4B,IAAIvV,MACzBsX,EAAQha,KAAK,IAAI0wC,GAAmBhuC;aAGjCsX;;;;;;;;;;;;;;;;;;;;;;IAuBTvgB,iBAAAA,SAA8Bk3C;QAC5Bh3C,KAAKs0C,KAAmB0C,EAAYlW,IACpC9gC,KAAKw2C,KAAiBl3B;QACtB,IAAMmB,IAAazgB,KAAKi3C,GAAkBD,EAAYl2B;QACtD,OAAO9gB,KAAK22C,GAAal2B;;;;;;;;IAS3B3gB,iBAAAA;QACE,OAAOihB,GAAam2B,GAClBl3C,KAAKsgB,OACLtgB,KAAKu0C,IACLv0C,KAAK0gB,sBACL1gB,KAAKy2C;;;ICxbT32C,WACmB43B,GACAyf,GACAC,GACAtf;kBAHAJ,aACAyf,GACAn3C,sBAAAo3C,aACAtf,GAPnB93B,UAPkB,GAgBhBA,KAAK24B,KAAU,IAAID,GACjB14B,KAAK03B;;;WAMT53B,iBAAAA;QACEE,KAAKq3C;OAGCv3C,iBAAAA;QAAAA;QACNE,KAAK24B,GAAQe,IAAcD;;;;2BACnBxG,IAAcjzB,KAAKm3C,GAAYG,OAC/BC,IAAcv3C,KAAKw3C,GAAqBvkB,OAE5CskB,EACGx0C,MAAKwL,SAAAA;wBACJvO,EAAK03B,GAAWe,IAAiB;4BACxBxF,OAAAA,EACJwkB,SACA10C,MAAK;gCACJ/C,EAAK83B,GAAS71B,QAAQsM;gCAEvBypB,OAAM0f,SAAAA;gCACL13C,EAAK23C,GAAuBD;;;wBAInC1f,OAAM4f,SAAAA;wBACL53C,EAAK23C,GAAuBC;;;;;OAM9B93C,iBAAAA,SAAqBmzB;QAC3B;YACE,IAAMskB,IAAcv3C,KAAKo3C,eAAenkB;YACxC,QACEhpB,EAAkBstC,MACjBA,EAAYvf,SACZuf,EAAYx0C,OAORw0C,KALLv3C,KAAK83B,GAAS7F,OACZtwB,MAAM;YAED;UAGT,OAAOmtB;;YAGP,OADA9uB,KAAK83B,GAAS7F,OAAOnD,IACd;;OAIHhvB,iBAAAA,SAAuBgvB;QAAvBhvB;QACFE,KAAK63C,KAAU,KAAK73C,KAAK83C,GAA4BhpB,MACvD9uB,KAAK63C,MAAW,GAChB73C,KAAK03B,GAAWe,IAAiB;mBAC/Bz4B,EAAKq3C,MACEr1C,QAAQC;eAGjBjC,KAAK83B,GAAS7F,OAAOnD;OAIjBhvB,iBAAAA,SAA4BgvB;QAClC,IAAmB,oBAAfA,EAAMptB,MAA0B;;;YAGlC,IAAMJ,IAAQwtB,EAAyBxtB;YACvC,OACW,cAATA,KACS,0BAATA,MACC0Z,GAAiB1Z;;QAGtB;;UCtCFxB;;;;AAISwgB;;;;;AAKA9F;;;;;;;AAOAu9B;IAZA/3C,aAAAsgB,GAKAtgB,gBAAAwa,GAOAxa,YAAA+3C;QAMTj4C,SAAmBiJ;IAAA/I,WAAA+I;;;;;;;IAQnB/I;;IAsEAF,WACYsvC,GACA+H;;IAEAa,GACF11C,GACA21C;kBALE7I,aACA+H,aAEAa,GACFh4C,mBAAAsC,aACA21C,GA3CVj4C,UAA0D;QAE1DA,UAA8B,IAAIs7B,IAA4B4c,SAAAA;YAC5DA,OAAAA,EAAEjqC;aAEJjO,UAA4B,IAAI2U;;;;;QAKhC3U,UAAkD;;;;;QAKlDA,UAAoC,IAAIub,GACtCjT,EAAYvC;;;;;QAMd/F,UAA2C,IAAI2U,KAI/C3U,UAA8B,IAAIm4C;;QAElCn4C,UAAgC;;QAIhCA,UAAiC,IAAI2U,KACrC3U,UAAiCw3B,GAAkB4gB,MAE3Cp4C;;WAWRq4C;aAAAA;YACE;;;;;kFAIFv4C,wBAAAA,SAAUw4C;QAURt4C,KAAKs4C,KAAqBA;;;;;;;yBAQ5Bx4C,SAAawgB;;;;;;2BACXtgB,KAAKu4C,GAAiB,cAKhBC,IAAYx4C,KAAKy4C,GAAkB31C,IAAIwd;;;;;;;oBAQ3C9F,IAAWg+B,EAAUh+B,UACrBxa,KAAKg4C,GAAkBU,GAAoBl+B,IAC3C4a,IAAeojB,EAAUT,KAAKY;;;2CAEL34C,KAAKovC,GAAWwJ,GAAet4B,EAAMrI;;;2BAAxDkM,cAEAqC,IAASxmB,KAAKg4C,GAAkBU,GACpCv0B,EAAW3J,WAEbA,IAAW2J,EAAW3J,0BACDxa,KAAK64C,GACxBv4B,GACA9F,GACW,cAAXgM;;;oBAHF4O,cAKIp1B,KAAK84C,MACP94C,KAAKm3C,GAAY4B,OAAO50B;;;oBAI5B,wBAAOiR;;;;;;;;;qBAOCt1B,SACRwgB,GACA9F,GACAvS;;;;;;2CAE0BjI,KAAKovC,GAAW4J,GACxC14B;;;;oBA4BF,OA7BM02B,cAIAe,IAAO,IAAIkB,GAAK34B,GAAO02B,EAAYlW,KACnCoY,IAAiBnB,EAAKd,GAAkBD,EAAYl2B,YACpDq4B,IAA0B73B,GAAaC,GAC3C/G,GACAvS,iCAAWjI,KAAKq0C;oBAEZvU,IAAaiY,EAAKpB,GACtBuC;gDAC4Bl5C,KAAK84C,IACjCK,IAEFn5C,KAAKo5C,GAAoB5+B,GAAUslB,EAAWwW,KAOxC5jC,IAAO,IAAI2mC,GAAU/4B,GAAO9F,GAAUu9B;sCAC5C/3C,KAAKy4C,GAAkBnlC,IAAIgN,GAAO5N,IAC9B1S,KAAKs5C,GAAgBh7B,IAAI9D,KAC3Bxa,KAAKs5C,GAAgBx2C,IAAI0X,GAAWnU,KAAKia,KAEzCtgB,KAAKs5C,GAAgBhmC,IAAIkH,GAAU,EAAC8F;oBAE/Bwf,EAAWmM;;;;;0DAIpBnsC,SAAewgB;;;;;;;;oBASb,OARAtgB,KAAKu4C,GAAiB,eAEhBC,IAAYx
4C,KAAKy4C,GAAkB31C,IAAIwd,KAKvCi5B,IAAUv5C,KAAKs5C,GAAgBx2C,IAAI01C,EAAUh+B,WACvC7U,SAAS,sBACnB3F,KAAKs5C,GAAgBhmC,IACnBklC,EAAUh+B,UACV++B,EAAQ9xC,QAAOywC,SAAAA;gCAAMA,EAAE1yC,QAAQ8a;+BAEjCtgB,KAAKy4C,GAAkBllC,OAAO+M,QAK5BtgB,KAAK84C;;;oBAGP94C,KAAKg4C,GAAkBwB,GAAuBhB,EAAUh+B,WAC5Bxa,KAAKg4C,GAAkByB,GACjDjB,EAAUh+B,kDAIJxa,KAAKovC,GACRsK,GAAclB,EAAUh+B,2CACxBzX,MAAK;wBACJ/C,EAAKg4C,GAAkB2B,GAAgBnB,EAAUh+B,WACjDxa,EAAKm3C,GAAYyC,GAASpB,EAAUh+B,WACpCxa,EAAK65C,GAAuBrB,EAAUh+B;wBAEvCwd,MAAMuJ;;;;;;;;;2BAGXvhC,KAAK65C,GAAuBrB,EAAUh+B,2BAChCxa,KAAKovC,GAAWsK,GACpBlB,EAAUh+B;;;;;;;;;;;;;;;;;;;;;;wBAgBhB1a,SAAYkxB,GAAmB8oB;;;;;;oBAC7B95C,KAAKu4C,GAAiB;;;uEAGCv4C,KAAKovC,GAAW2K,GAAW/oB;;;2BAA1CziB,cACNvO,KAAKg4C,GAAkBgC,GAAmBzrC,EAAO+hB,UACjDtwB,KAAKi6C,GAAoB1rC,EAAO+hB,SAASwpB,oBACnC95C,KAAKk6C,GAAgC3rC,EAAO8R;;;qDAC5CrgB,KAAKm3C,GAAYpG;;;;;;yCAIjBjiB,IAAQ2L,GAA6BzL,GAAG,4BAC9C8qB,EAAa7nB,OAAOnD;;;;;;;;;;;;;;;;;;;;;;;;;IAqBxBhvB,6BAAAA,SACE43B,GACA0f,GACAtf;QAEA,IAAIqiB,GACFziB,GACA13B,KAAKm3C,IACLC,GACAtf,GACAsiB;wBAGJt6C,SAAuBilB;;;;;;oBACrB/kB,KAAKu4C,GAAiB;;;uEAEEv4C,KAAKovC,GAAWuD,GAAiB5tB;;;2BAAjD1E;;oBAEN0E,EAAY7D,GAAc/a,SAAQ,SAACgd,GAAc3I;wBAC/C,IAAM6/B,IAAkBr6C,EAAKs6C,GAA+Bx3C,IAC1D0X;wBAEE6/B;;;wBA9W8Bh3C,GAkX9B8f,EAAa1B,GAAelb,OAC1B4c,EAAazB,GAAkBnb,OAC/B4c,EAAaxB,GAAiBpb,QAC9B,IAGA4c,EAAa1B,GAAelb,OAAO,IACrC8zC,EAAgBE,UACPp3B,EAAazB,GAAkBnb,OAAO,IA1XjBlD,GA4X5Bg3C,EAAgBE,MAGTp3B,EAAaxB,GAAiBpb,OAAO,MA/XhBlD,GAiY5Bg3C,EAAgBE;wBAGlBF,EAAgBE;yCAMhBv6C,KAAKk6C,GAAgC75B,GAAS0E;;;;;;;2CAE9Cwc;;;;;;;;;;;;;;;IAQVzhC,iBAAAA,SACEu0C,GACAmG;QAEAx6C,KAAKu4C,GAAiB;QACtB,IAAMkC,IAAmB;QACzBz6C,KAAKy4C,GAAkBtyC,SAAQ,SAACma,GAAOk4B;YACrC,IAAM1Y,IAAa0Y,EAAUT,KAAK2C,GAAuBrG;YAKrDvU,EAAWmM,YACbwO,EAAiBp0C,KAAKy5B,EAAWmM;aAGrCjsC,KAAKs4C,GAAoBqC,GAAoBtG,IAC7Cr0C,KAAKs4C,GAAoBnM,GAAcsO,IACvCz6C,KAAKq0C,cAAcA;wBAGrBv0C,SAAmB0a,GAAoB8X;;;;;;2BACrCtyB,KAAKu4C,GAAiB;;oBAGtBv4C,KAAKg4C,GAAkB4C,GAAiBpgC,GAAU,YAAY8X,IAExD+nB,IAAkBr6C,KAAKs6C,GAA+Bx3C,IAAI0X,KAC1DqgC,IAAWR,KAAmBA,EAAgBtxC,QAYlDqY,KAHIA,IAAkB,IAAI7F,GACxBjT,EAAYvC,IAEoByV,GAChCq/B,GACA,IAAIvmC,GAAWumC,GAAUv1C,EAAgByB;oBAErCsa,IAAyB/B,KAAiBf,IAAIs8B,IAC9CC,IAAQ,IAAIt5B,GAChBlc,EAAgByB;yCACK,IAAI4N;4CACD,IAAI6J,GAAoBvZ,KAChDmc,GACAC,oBAGIrhB,KAAK2yC,GAAiBmI;;;;;;;;;oBAO5B96C,KAAK+6C,KAA0B/6C,KAAK+6C,GAAwBp/B,OAC1Dk/B,IAEF76C,KAAKs6C,GAA+B/mC,OAAOiH,IAC3Cxa,KAAKg7C;;;2CAECh7C,KAAKovC,GACRsK,GAAcl/B,qCACdzX,MAAK;wBAAM/C,OAAAA,EAAK65C,GAAuBr/B,GAAU8X;wBACjD0F,MAAMuJ;;;;;;;;;;wBAIbzhC,SACEm7C;;;;;;oBAEAj7C,KAAKu4C,GAAiB,2BAEhBjoB,IAAU2qB,EAAoBjqB,MAAMV;;;;;oBAM1CtwB,KAAKk7C,GAAoB5qB,cAAoB,OAE7CtwB,KAAKm7C,GAA8B7qB;;;uEAGXtwB,KAAKovC,GAAW1R,GACpCud;;;2BADI56B,cAGNrgB,KAAKg4C,GAAkBoD,GAAoB9qB,GAAS,iCAC9CtwB,KAAKk6C,GAAgC75B;;;;;;2CAErCkhB;;;;;;;;;;wBAIVzhC,SACEwwB,GACAxB;;;;;;oBAEA9uB,KAAKu4C,GAAiB;;;;;oBAMtBv4C,KAAKk7C,GAAoB5qB,GAASxB,IAElC9uB,KAAKm7C,GAA8B7qB;;;uEAGXtwB,KAAKovC,GAAWiM,GAAY/qB;;;2BAA5CjQ,cACNrgB,KAAKg4C,GAAkBoD,GAAoB9qB,GAAS,YAAYxB,oBAC1D9uB,KAAKk6C,GAAgC75B;;;;;;2CAErCkhB;;;;;;;;;;;;;;;qBAQVzhC,SAAoCyxB;;;;;;oBAC7BvxB,KAAKm3C,GAAY5H,QACpB7pB,GAhfU,cAkfR;;;;uEAM2B1lB,KAAKovC,GAAWpR;;;oBAC7C,Q7BpiByB,O6BmiBnBsd,sCAGJ/pB,EAAStvB,gBAILs5C,IAAYv7C,KAAKw7C,GAAuB14C,IAAIw4C,MAAmB,IAC3Dj1C,KAAKkrB;oBACfvxB,KAAKw7C,GAAuBloC,IAAIgoC,GAAgBC;;;yCAE1CE,IAAiBhhB,GACrBzL,GACA;oBAEFuC,EAASU,OAAOwpB;;;;;;;;;;;;IAQZ37C,iBAAAA,SAA8BwwB;SACnCtwB,KAAKw7C,GAAuB14C,IAAIwtB,MAAY,IAAInqB,SAAQorB,SAAAA;YACvDA,EAAStvB;aAGXjC,KAAKw7C,GAAuBjoC,OAAO+c;;oFAI7BxwB,iBAAAA,SAAwC47C;QAC9C17C,KAAKw7C,GAAuBr1C,SAAQo1C,SAAAA;YAClCA,EAAUp1C,SAAQorB,SAAAA;gBAChBA,EAASU,OAAO,
IAAI7uB,EAAehD,EAAKE,WAAWo7C;;aAIvD17C,KAAKw7C,GAAuBG;OAGtB77C,iBAAAA,SACNwwB,GACAiB;QAEA,IAAIqqB,IAAe57C,KAAK67C,GAAsB77C,KAAKsC,YAAYw5C;QAC1DF,MACHA,IAAe,IAAIrgC,GACjBtW,MAGJ22C,IAAeA,EAAapgC,GAAO8U,GAASiB,IAC5CvxB,KAAK67C,GAAsB77C,KAAKsC,YAAYw5C,OAAWF;;;;;;IAO/C97C,iBAAAA,SAAoBwwB,GAAkBxB;QAC9C,IAAI8sB,IAAe57C,KAAK67C,GAAsB77C,KAAKsC,YAAYw5C;;;gBAI/D,IAAIF,GAAc;YAChB,IAAMrqB,IAAWqqB,EAAa94C,IAAIwtB;YAC9BiB,MAKEzC,IACFyC,EAASU,OAAOnD,KAEhByC,EAAStvB,WAEX25C,IAAeA,EAAajgC,OAAO2U,KAErCtwB,KAAK67C,GAAsB77C,KAAKsC,YAAYw5C,OAAWF;;OAIjD97C,iBAAAA,SACR0a,GACAsU;QAFQhvB;yBAERgvB,WAEA9uB,KAAKg4C,GAAkBwB,GAAuBh/B;QAQ9C,KAAoBxa,WAAAA,IAAAA,KAAKs5C,GAAgBx2C,IAAI0X,IAAzBxa,cAAAA;YAAf,IAAMsgB;YACTtgB,KAAKy4C,GAAkBllC,OAAO+M,IAC1BwO,KACF9uB,KAAKs4C,GAAoByD,GAAaz7B,GAAOwO;;QAIjD9uB,KAAKs5C,GAAgB/lC,OAAOiH,IAExBxa,KAAK84C,MACW94C,KAAKg8C,GAAkBC,GAAsBzhC,GACrDrU,SAAQ00C,SAAAA;YACK76C,EAAKg8C,GAAkBE,GAAYrB;;YAGtD76C,EAAKm8C,GAAkBtB;;OAMvB/6C,iBAAAA,SAAkBiJ;;;QAGxB,IAAMqzC,IAAgBp8C,KAAK+6C,GAAwBj4C,IAAIiG;QACjC,SAAlBqzC,MAKJp8C,KAAKm3C,GAAYyC,GAASwC,IAC1Bp8C,KAAK+6C,KAA0B/6C,KAAK+6C,GAAwBp/B,OAAO5S,IACnE/I,KAAKs6C,GAA+B/mC,OAAO6oC,IAC3Cp8C,KAAKg7C;OAGGl7C,iBAAAA,SACR0a,GACA87B;QAEA,KAA0BA,WAAAA,OAAAA,cAAAA;YAArB,IAAM+F;YACLA,aAAuBtF,MACzB/2C,KAAKg8C,GAAkBjc,GAAasc,EAAYtzC,KAAKyR,IACrDxa,KAAKs8C,GAAiBD,MACbA,aAAuBvF,MAChCpxB,GApoBQ,cAooBU,kCAAkC22B,EAAYtzC;YAChE/I,KAAKg8C,GAAkBhc,GAAgBqc,EAAYtzC,KAAKyR,IACnCxa,KAAKg8C,GAAkBE,GAC1CG,EAAYtzC;;YAIZ/I,KAAKm8C,GAAkBE,EAAYtzC,QAGrCnD;;OAKE9F,iBAAAA,SAAiBu8C;QACvB,IAAMtzC,IAAMszC,EAAYtzC;QACnB/I,KAAK+6C,GAAwBj4C,IAAIiG,OACpC2c,GAtpBU,cAspBQ,4BAA4B3c,IAC9C/I,KAAKu8C,GAAyBl2C,KAAK0C;QACnC/I,KAAKg7C;;;;;;;;;;IAYDl7C,iBAAAA;QACN,MACEE,KAAKu8C,GAAyB52C,SAAS,KACvC3F,KAAK+6C,GAAwBx0C,OAAOvG,KAAKi4C,MACzC;YACA,IAAMlvC,IAAM/I,KAAKu8C,GAAyBjJ,SACpC8I,IAAgBp8C,KAAKw8C,GAAuBn0C;YAClDrI,KAAKs6C,GAA+BhnC,IAClC8oC,GACA,IAAIK,GAAgB1zC,KAEtB/I,KAAK+6C,KAA0B/6C,KAAK+6C,GAAwBv/B,GAC1DzS,GACAqzC,IAEFp8C,KAAKm3C,GAAY4B,OACf,IAAIh+B,GACF9D,GAAM4T,GAAO9hB,EAAIzB,MAAM2Q,MACvBmkC,6BAEAtmB,GAAe4mB;;;;IAOvB58C,iBAAAA;QACE,OAAOE,KAAK+6C;;;IAIdj7C,iBAAAA;QACE,OAAOE,KAAKu8C;wBAGJz8C,SACRugB,GACA0E;;;;;;2BAEM43B,IAA2B,IAC3BC,IAA2C,IAC3CC,IAAyC,IAE/C78C,KAAKy4C,GAAkBtyC,SAAQ,SAAC6d,GAAGw0B;wBACjCqE,EAAiBx2C,KACfrE,QAAQC,UACLc,MAAK;4BACJ,IAAMm2C,IAAiBV,EAAUT,KAAKd,GAAkB52B;4BACxD,OAAK64B,EAAelE,KAMbh1C,EAAKovC,GACT4J,GAAaR,EAAUl4B,qCACvBvd,MAAK;oCAAG+d;gCACA03B,OAAAA,EAAUT,KAAKd,GACpBn2B,GACAo4B;kCAVGA;;;;oDAcVn2C,MAAMm2C,SAAAA;4BACL,IAAM/1B,IACJ4B,KAAeA,EAAY7D,GAAcpe,IAAI01C,EAAUh+B,WACnDslB,IAAa0Y,EAAUT,KAAKpB,GAChCuC;wDAC4Bl5C,EAAK84C,IACjC31B;4BAMF,IAJAnjB,EAAKo5C,GACHZ,EAAUh+B,UACVslB,EAAWwW,KAETxW,EAAWmM,UAAU;gCACnBjsC,EAAK84C,MACP94C,EAAKg4C,GAAkB4C,GACrBpC,EAAUh+B,UACVslB,EAAWmM,SAAStrB,YAAY,gBAAgB,YAIpDg8B,EAASt2C,KAAKy5B,EAAWmM;gCACzB,IAAMxrB,IAAa4U,GAAiBynB,GAClCtE,EAAUh+B,UACVslB,EAAWmM;gCAEb2Q,EAAqBv2C,KAAKoa;;;yCAM9Bze,QAAQmwB,IAAI0qB;;;qCAClB78C,KAAKs4C,GAAoBnM,GAAcwQ,oBACjC38C,KAAKovC,GAAW2N,GAAuBH;;;;;;;OAGrC98C,iBAAAA,SAAiBk9C,wBAO3Bl9C,SAA6B+B;;;;;;2BACN7B,KAAKsC,YAAYkD,QAAQ3D,2CAGvB7B,KAAKovC,GAAW6N,GAAiBp7C;;;2BAAhD0M,cACNvO,KAAKsC,cAAcT;;oBAGnB7B,KAAKk9C,GACH;;oBAGFl9C,KAAKg4C,GAAkBiF,GACrBp7C,GACA0M,EAAOkuB,IACPluB,EAAOmuB,qBAEH18B,KAAKk6C,GAAgC3rC,EAAOsuB;;;;;;2CAG9C78B,KAAKm3C,GAAYgG;;;;;;;OAGzBr9C,4BAAAA;QACE,OAAOE,KAAKm3C,GAAYzG;OAG1B5wC,6BAAAA;QACE,OAAOE,KAAKm3C,GAAYiG;OAG1Bt9C,iBAAAA,SAAuB0a;QACrB,IAAM6/B,IAAkBr6C,KAAKs6C,GAA+Bx3C,IAAI0X;QAChE,IAAI6/B,KAAmBA,EAAgBE,IACrC,OAAOj7B,KAAiBf,IAAI87B,EAAgBtxC;QAE5C,IAAIs0C,IAAS/9B,MACPi6B,IAAUv5C,KAAKs5C,GAAgBx2C,IAAI0X;QACzC,
KAAK++B,GACH,OAAO8D;QAET,KAAoB9D,WAAAA,IAAAA,GAAAA,cAAAA,KAAS;YAAxB,IAAMj5B,UACHk4B,IAAYx4C,KAAKy4C,GAAkB31C,IAAIwd;YAE7C+8B,IAASA,EAAOC,GAAU9E,EAAUT,KAAKwF;;QAE3C,OAAOF;;UC52Bbv9C;IACEE,kBACAA,UAA6B;;IAyB7BF,WAAoB2xC;kBAAAA,GARpBzxC,UAAkB,IAAIs7B,IAAqC4c,SAAAA;YACzDA,OAAAA,EAAEjqC;aAGIjO,6CAERA,UAAwD,IAAIytC,KAG1DztC,KAAKyxC,GAAW+L,UAAUx9C;;gCAG5BF,SAAaoqC;;;;;;wBACL5pB,IAAQ4pB,EAAS5pB,OACnBm9B,SAEAC,IAAY19C,KAAKu5C,GAAQz2C,IAAIwd,QAE/Bm9B,QACAC,IAAY,IAAIC,MAGdF,GALCC;;;;uDAODA,IAAAA,mBAA2B19C,KAAKyxC,GAAWsH,OAAOz4B;;;2BAAlDo9B,EAAUE;;;oBAOV,qBALMnC,IAAiBhhB,GACrBzL,GACA,8BAA4Bkb,EAAS5pB;0CAEvC4pB,EAAS2T,QAAQpC;;;2BAKrBz7C,KAAKu5C,GAAQjmC,IAAIgN,GAAOo9B,IACxBA,EAAUI,GAAUz3C,KAAK6jC;;oBAGLA,EAASwQ,GAAuB16C,KAAKq0C,cAMrDqJ,EAAUE,MACQ1T,EAAS6T,GAAeL,EAAUE,OAEpD59C,KAAKg+C;;;;wBAKXl+C,SAAeoqC;;;;gBAab,OAZM5pB,IAAQ4pB,EAAS5pB,OACnB29B,SAEEP,IAAY19C,KAAKu5C,GAAQz2C,IAAIwd,QAE3B7Z,IAAIi3C,EAAUI,GAAUv2C,QAAQ2iC,OAC7B,MACPwT,EAAUI,GAAUztB,OAAO5pB,GAAG;gBAC9Bw3C,IAA4C,MAA/BP,EAAUI,GAAUn4C,SAIjCs4C,sBACFj+C,KAAKu5C,GAAQhmC,OAAO+M,IACbtgB,KAAKyxC,GAAWmI,GAASt5B;;;OAIpCxgB,iBAAAA,SAAco+C;QAEZ,KADA,IAAIC,eACmBD,OAAAA,cAAAA,KAAW;YAA7B,IAAMN,UACHt9B,IAAQs9B,EAASt9B,OACjBo9B,IAAY19C,KAAKu5C,GAAQz2C,IAAIwd;YACnC,IAAIo9B,GAAW;gBACb,KAAuBA,WAAAA,IAAAA,EAAUI,IAAVJ,cAAAA;yBACRK,GAAeH,OAC1BO;;gBAGJT,EAAUE,KAAWA;;;QAGrBO,KACFn+C,KAAKg+C;OAITl+C,iBAAAA,SAAawgB,GAAcwO;QACzB,IAAM4uB,IAAY19C,KAAKu5C,GAAQz2C,IAAIwd;QACnC,IAAIo9B,GACF,KAAuBA,WAAAA,IAAAA,EAAUI,IAAVJ,cAAAA;iBACZG,QAAQ/uB;;;;gBAMrB9uB,KAAKu5C,GAAQhmC,OAAO+M;OAGtBxgB,iBAAAA,SAAoBu0C;QAClBr0C,KAAKq0C,cAAcA;QACnB,IAAI8J;QACJn+C,KAAKu5C,GAAQpzC,SAAQ,SAAC6d,GAAG05B;YACvB,KAAuBA,WAAAA,IAAAA,EAAUI,IAAVJ,cAAAA;;qBAERhD,GAAuBrG,OAClC8J;;aAIFA,KACFn+C,KAAKg+C;OAITl+C,iBAAAA,SAA2Bs+C;QACzBp+C,KAAKq+C,GAAyB9/B,IAAI6/B;;;QAGlCA,EAAS/1C;OAGXvI,iBAAAA,SAA8Bs+C;QAC5Bp+C,KAAKq+C,GAAyB9qC,OAAO6qC;;;IAI/Bt+C,iBAAAA;QACNE,KAAKq+C,GAAyBl4C,SAAQi4C,SAAAA;YACpCA,EAAS/1C;;;;IAmCbvI,WACWwgB,GACDg+B,GACRpoC;QAFSlW,aAAAsgB,aACDg+B;;;;;QAVVt+C,cAIAA,UAAoC,MAE5BA,6CAONA,KAAKkW,UAAUA,KAAW;;;;;;;;WAS5BpW,iBAAAA,SAAey+C;QAMb,KAAKv+C,KAAKkW,QAAQsoC,wBAAwB;YAGxC;;YADA,IAAM/9B,IAAmC,WACjB89B,IAAAA,EAAK99B,YAAL89B,cAAAA;gBAAnB,IAAMv7B;qCACLA,EAAU5C,QACZK,EAAWpa,KAAK2c;;YAGpBu7B,IAAO,IAAIx9B,GACTw9B,EAAKj+B,OACLi+B,EAAKh+B,MACLg+B,EAAK/9B,IACLC,GACA89B,EAAK79B,IACL69B,EAAK59B,WACL49B,EAAK39B;;;QAIT,IAAIu9B;QAYJ,OAXKn+C,KAAKy+C,KAKCz+C,KAAK0+C,GAAiBH,OAC/Bv+C,KAAKs+C,GAAcj2C,KAAKk2C,IACxBJ,UANIn+C,KAAK2+C,GAAwBJ,GAAMv+C,KAAKq0C,iBAC1Cr0C,KAAK4+C,GAAkBL;QACvBJ,SAOJn+C,KAAKu+C,KAAOA,GACLJ;OAGTr+C,sBAAAA,SAAQgvB;QACN9uB,KAAKs+C,GAAcxvB,MAAMA;;kDAI3BhvB,iBAAAA,SAAuBu0C;QACrBr0C,KAAKq0C,cAAcA;QACnB,IAAI8J;QASJ,OAPEn+C,KAAKu+C,OACJv+C,KAAKy+C,MACNz+C,KAAK2+C,GAAwB3+C,KAAKu+C,IAAMlK,OAExCr0C,KAAK4+C,GAAkB5+C,KAAKu+C,KAC5BJ;QAEKA;OAGDr+C,iBAAAA,SACNy+C,GACAlK;;QAQA,KAAKkK,EAAK59B,WACR;;;gBAKF,IAAMk+B,gCAAcxK;;;gBAGpB,SAAIr0C,KAAKkW,QAAQ4oC,MAAyBD,KASlCN,EAAKh+B,KAAKnX,mCAAairC;;WAGzBv0C,iBAAAA,SAAiBy+C;;;;;QAKvB,IAAIA,EAAK99B,WAAW9a,SAAS,GAC3B;QAGF,IAAMo5C,IACJ/+C,KAAKu+C,MAAQv+C,KAAKu+C,GAAKloC,qBAAqBkoC,EAAKloC;QACnD,UAAIkoC,EAAK39B,OAAoBm+B,aACpB/+C,KAAKkW,QAAQsoC;;;;WAShB1+C,iBAAAA,SAAkBy+C;QAKxBA,IAAOx9B,GAAam2B,GAClBqH,EAAKj+B,OACLi+B,EAAKh+B,MACLg+B,EAAK79B,IACL69B,EAAK59B,YAEP3gB,KAAKy+C,SACLz+C,KAAKs+C,GAAcj2C,KAAKk2C;;;;WC3S1Bz+C,iBAAAA,SAAsB+7B;QACpB77B,KAAKg/C,KAAqBnjB;OAG5B/7B,iBAAAA,SACEmzB,GACA3S,GACA1F,GACAkmB;QAJFhhC;;;;gBAcE,OAAIwgB,EAAM2+B,QAMNrkC,EAA6BpV,QAAQF,EAAgByB,SALhD/G,KAAKk/C,GAA0BjsB,GAAa3S,KAS9CtgB,KAAKg/C,GAAoBpiB,GAAa3J,GAAa6N,GAAYz4B,MACpEyY,SAAAA;YACE,IAAMq+B,IAAkBn/
C,EAAKo/C,GAAW9+B,GAAOQ;YAE/C,QACGR,EAAM40B,QAAqB50B,EAAM+0B,SAClCr1C,EAAKg1C,GACH10B,EAAMvJ,IACNooC,GACAre,GACAlmB,KAGK5a,EAAKk/C,GAA0BjsB,GAAa3S,MAGjD6N,QAAiBK,EAASC,SAC5B/I,GACE,wBACA,yDACA9K,EAA6BnZ,YAC7B6e,EAAM7e;YAMHzB,EAAKg/C,GAAoBxqB,GAC9BvB,GACA3S,GACA1F,GACAvS,MAAKg3C,SAAAA;;;;uBAILF,EAAgBh5C,SAAQuN,SAAAA;oBACtB2rC,IAAiBA,EAAe7jC,GAAO9H,EAAI3K,KAAK2K;qBAE3C2rC;;;;;;4EAOPv/C,iBAAAA,SACNwgB,GACAQ;;;QAIA,IAAI2T,IAAe,IAAIjW,IAAoB,SAACtG,GAAIC;YAC9CmI,OAAAA,EAAMk0B,GAAct8B,GAAIC;;QAO1B,OALA2I,EAAU3a,SAAQ,SAAC6d,GAAGlS;YAChBA,aAAoBC,MAAYuO,EAAM9G,QAAQ1H,OAChD2iB,IAAeA,EAAalW,IAAIzM;aAG7B2iB;;;;;;;;;;;;;IAcD30B,iBAAAA,SACNiX,GACAuoC,GACAxe,GACAye;;;QAIA,IAAIze,EAAWv6B,SAAS+4C,EAAsB/4C,MAC5C;;;;;;;;;gBAWF,IAAMi5C,wBACJzoC,IACIuoC,EAAsBnK,SACtBmK,EAAsB9wC;QAC5B,SAAKgxC,MAKHA,EAAenpC,oBACfmpC,EAAehuC,QAAQrE,EAAUoyC,KAA4B;OAIzDz/C,iBAAAA,SACNmzB,GACA3S;QAUA,OARI6N,QAAiBK,EAASC,SAC5B/I,GACE,wBACA,gDACApF,EAAM7e;QAIHzB,KAAKg/C,GAAoBxqB,GAC9BvB,GACA3S,GACAhb,EAAgByB;;;ICxJpBjH,WACmBkzB,GACAsM;kBADAtM,aACAsM;;;;;QAhBnBt/B,UAAyC;;QAGzCA,UAA+B;;;;;QAMvBA,uBAA8BuJ,EAAWuR;;QAGjD9a,UAA+B,IAAIwe,GAAUgjB,GAAaC;;WAO1D3hC,iBAAAA,SAAWmzB;QACT,OAAOjB,GAAmB/vB,QAAsC,MAA9BjC,KAAK+yB,GAAcptB;OAGvD7F,iBAAAA,SACEmzB,GACAjC,GACAE;QAEA,IAAMZ,IAAUU,EAAMV,SAChBmvB,IAAaz/C,KAAK0/C,GAAuBpvB,GAAS;QAiBxD,OA9C8BjtB,GA+Bb,MAAfo8C;;QAKYz/C,KAAK+yB,GAAc0sB,IASjCz/C,KAAK8sC,kBAAkB5b,GAChBc,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB;QAEA,OAAOjB,GAAmB/vB,QAAQjC,KAAK8sC;OAGzChtC,iBAAAA,SACEmzB,GACA/B;QAGA,OADAlxB,KAAK8sC,kBAAkB5b,GAChBc,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB,GACAxoB,GACA8lB,GACAC;QAIA,IAAMF,IAAUtwB,KAAK2/C;QACrB3/C,KAAK2/C,MAED3/C,KAAK+yB,GAAcptB,SAAS,KAChB3F,KAAK+yB,GAAc/yB,KAAK+yB,GAAcptB,SAAS;QAO/D,IAAMqrB,IAAQ,IAAI4uB,GAChBtvB,GACA7lB,GACA8lB,GACAC;QAEFxwB,KAAK+yB,GAAc1sB,KAAK2qB;;QAGxB,KAAuBR,WAAAA,OAAAA,cAAAA;YAAlB,IAAMvH;YACTjpB,KAAK6/C,KAAuB7/C,KAAK6/C,GAAqBthC,IACpD,IAAIijB,GAAavY,EAASlgB,KAAKunB,KAGjCtwB,KAAKgzB,GAAa8sB,GAChB7sB,GACAhK,EAASlgB,IAAIzB,KAAK6jB;;QAItB,OAAO6G,GAAmB/vB,QAAQ+uB;OAGpClxB,iBAAAA,SACEmzB,GACA3C;QAEA,OAAO0B,GAAmB/vB,QAAQjC,KAAK+/C,GAAkBzvB;OAG3DxwB,iBAAAA,SACEmzB,GACA3C;QAEA,IAAMqvB,IAAcrvB,IAAU,GAIxB0vB,IAAWhgD,KAAKigD,GAAeN,IAC/Bn5C,IAAQw5C,IAAW,IAAI,IAAIA;;;gBACjC,OAAOhuB,GAAmB/vB,QACxBjC,KAAK+yB,GAAcptB,SAASa,IAAQxG,KAAK+yB,GAAcvsB,KAAS;OAIpE1G,iBAAAA;QACE,OAAOkyB,GAAmB/vB,QACM,MAA9BjC,KAAK+yB,GAAcptB,UhCvIM,IgCuI2B3F,KAAK2/C,KAAc;OAI3E7/C,iBAAAA,SACEmzB;QAEA,OAAOjB,GAAmB/vB,QAAQjC,KAAK+yB,GAAc9sB;OAGvDnG,iBAAAA,SACEmzB,GACAitB;QAFFpgD,cAIQqe,IAAQ,IAAIqjB,GAAa0e,GAAa,IACtCt5C,IAAM,IAAI46B,GAAa0e,GAAapxC,OAAOqxC,oBAC3C5xC,IAA0B;QAchC,OAbAvO,KAAK6/C,GAAqB3d,GAAe,EAAC/jB,GAAOvX,MAAMg7B,SAAAA;YAKrD,IAAM5Q,IAAQhxB,EAAK+/C,GAAkBne,EAAIS;YAKzC9zB,EAAOlI,KAAK2qB;aAGPgB,GAAmB/vB,QAAQsM;OAGpCzO,iBAAAA,SACEmzB,GACAmtB;QAFFtgD,cAIMugD,IAAiB,IAAI7hC,GAAkBvZ;QAe3C,OAbAm7C,EAAaj6C,SAAQ+5C,SAAAA;YACnB,IAAM/hC,IAAQ,IAAIqjB,GAAa0e,GAAa,IACtCt5C,IAAM,IAAI46B,GAAa0e,GAAapxC,OAAOqxC;YACjDngD,EAAK6/C,GAAqB3d,GAAe,EAAC/jB,GAAOvX,MAAMg7B,SAAAA;gBAMrDye,IAAiBA,EAAe9hC,IAAIqjB,EAAIS;;aAIrCrQ,GAAmB/vB,QAAQjC,KAAKsgD,GAAoBD;OAG7DvgD,iBAAAA,SACEmzB,GACA3S;;;QAQA,IAAMigC,IAASjgC,EAAMhZ,MACfk5C,IAA8BD,EAAO56C,SAAS,GAMhD86C,IAAYF;;;;;gBACXj4C,EAAYuO,GAAc4pC,OAC7BA,IAAYA,EAAU9qC,MAAM;QAG9B,IAAMwI,IAAQ,IAAIqjB,GAAa,IAAIl5B,EAAYm4C,IAAY,IAIvDJ,IAAiB,IAAI7hC,GAAkBvZ;;;gBAmB3C,OAjBAjF,KAAK6/C,GAAqB/6B,IAAa8c,SAAAA;YACrC,IAAM8e,IAAa9e,EAAI74B,IAAIzB;YAC3B,SAAKi5C,EAAOpvC,EAAWuvC;;;;;;YAQjBA,EAAW/6C,WAAW66C,MACxBH,IAAiBA,EAAe9hC,IAAIqjB,EAAIS;YAI3ClkB,IAEI6T,GAAmB/vB,QAAQjC,KAAKsgD,GAAoBD;OAGrDvgD,iBAAAA,SAAoB6gD;QAApB7gD,cAGAyO
,IAA0B;;;gBAOhC,OANAoyC,EAASx6C,SAAQmqB,SAAAA;YACf,IAAMU,IAAQhxB,EAAK+/C,GAAkBzvB;YACvB,SAAVU,KACFziB,EAAOlI,KAAK2qB;aAGTziB;OAGTzO,iBAAAA,SACEmzB,GACAjC;QAFFlxB;QArPgCuD,GA4Pb,MAFErD,KAAK0/C,GAAuB1uB,EAAMV,SAAS,aAK9DtwB,KAAK+yB,GAAcugB;QAEnB,IAAIsN,IAAa5gD,KAAK6/C;QACtB,OAAO7tB,GAAmB7rB,QAAQ6qB,EAAMR,YAAYvH,SAAAA;YAClD,IAAM2Y,IAAM,IAAIJ,GAAavY,EAASlgB,KAAKioB,EAAMV;YAEjD,OADAswB,IAAaA,EAAWrtC,OAAOquB,IACxB5hC,EAAKs/B,GAAkBuhB,GAC5B5tB,GACAhK,EAASlgB;YAEVV,MAAK;YACNrI,EAAK6/C,KAAuBe;;OAIhC9gD,iBAAAA,SAAyBwwB;;OAIzBxwB,iBAAAA,SACEs8B,GACArzB;QAEA,IAAM64B,IAAM,IAAIJ,GAAaz4B,GAAK,IAC5Bo5B,IAAWniC,KAAK6/C,GAAqBzd,GAAkBR;QAC7D,OAAO5P,GAAmB/vB,QAAQ8G,EAAIvD,QAAQ28B,KAAYA,EAASp5B;OAGrEjJ,iBAAAA,SACEs8B;QAQA,OANIp8B,KAAK+yB,GAAcptB,QAMhBqsB,GAAmB/vB;;;;;;;;;;IAWpBnC,iBAAAA,SAAuBwwB,GAAkBrU;QAM/C,OALcjc,KAAKigD,GAAe3vB;;;;;;;;;;;IAiB5BxwB,iBAAAA,SAAewwB;QACrB,OAAkC,MAA9BtwB,KAAK+yB,GAAcptB,SAEd,IAQF2qB,IADctwB,KAAK+yB,GAAc,GAAGzC;;;;;;;;;;IAQrCxwB,iBAAAA,SAAkBwwB;QACxB,IAAM9pB,IAAQxG,KAAKigD,GAAe3vB;QAClC,OAAI9pB,IAAQ,KAAKA,KAASxG,KAAK+yB,GAAcptB,SACpC,OAGK3F,KAAK+yB,GAAcvsB;;;;;;;IClUnC1G,WACmBkzB,GACA8tB;kBADA9tB,aACA8tB;;QAXX9gD,YAPD,IAAIub,GACTjT,EAAYvC;;QASN/F,YAAO;;;;;;;;WAiBPF,iBAAAA,SACNmzB,GACAvf,GACAqU;QAOA,IAAMhf,IAAM2K,EAAI3K,KACVg4C,IAAQ/gD,KAAKugB,KAAKzd,IAAIiG,IACtBi4C,IAAeD,IAAQA,EAAMx6C,OAAO,GACpC06C,IAAcjhD,KAAK8gD,GAAMptC;QAU/B,OARA1T,KAAKugB,OAAOvgB,KAAKugB,KAAK/E,GAAOzS,GAAK;YAChCm4C,IAAextC;YACfnN,MAAM06C;YACNl5B,UAAAA;YAGF/nB,KAAKuG,QAAQ06C,IAAcD,GAEpBhhD,KAAKgzB,GAAa8sB,GACvB7sB,GACAlqB,EAAIzB,KAAK6jB;;;;;;;;IAULrrB,iBAAAA,SAAYogD;QAClB,IAAMa,IAAQ/gD,KAAKugB,KAAKzd,IAAIo9C;QACxBa,MACF/gD,KAAKugB,OAAOvgB,KAAKugB,KAAK5E,OAAOukC,IAC7BlgD,KAAKuG,QAAQw6C,EAAMx6C;OAIvBzG,iBAAAA,SACEmzB,GACAitB;QAEA,IAAMa,IAAQ/gD,KAAKugB,KAAKzd,IAAIo9C;QAC5B,OAAOluB,GAAmB/vB,QAAQ8+C,IAAQA,EAAMI,KAAgB;OAGlErhD,yBAAAA,SACEmzB,GACAmtB;QAFFtgD,cAIMsxB,IAAUpS;QAKd,OAJAohC,EAAaj6C,SAAQ+5C,SAAAA;YACnB,IAAMa,IAAQ/gD,EAAKugB,KAAKzd,IAAIo9C;YAC5B9uB,IAAUA,EAAQ5V,GAAO0kC,GAAaa,IAAQA,EAAMI,KAAgB;aAE/DnvB,GAAmB/vB,QAAQmvB;OAGpCtxB,iBAAAA,SACEmzB,GACA3S,GACAuT;QAYA,KANA,IAAIzC,IAAUlS,MAIRqhC,IAAS,IAAIj4C,EAAYgY,EAAMhZ,KAAKqO,MAAM,MAC1CyrC,IAAWphD,KAAKugB,KAAKvC,GAAgBuiC;;;UACpCa,EAASnjC,QAAW;4BACnBlV,WAEJnH,aAAOs/C,UAAen5B;YAExB,KAAKzH,EAAMhZ,KAAK6J,EAAWpI,EAAIzB,OAC7B;YAEEygB,EAAS5a,EAAU0mB,MAAkB,KAGrCstB,aAAyBpvC,MAAYuO,EAAM9G,QAAQ2nC,OACrD/vB,IAAUA,EAAQ5V,GAAO2lC,EAAcp4C,KAAKo4C;;QAGhD,OAAOnvB,GAAmB/vB,QAAQmvB;OAGpCtxB,iBAAAA,SACEmzB,GACArc;QAEA,OAAOob,GAAmB7rB,QAAQnG,KAAKugB,OAAOxX,SAAAA;YAAqB6N,OAAAA,EAAE7N;;OAGvEjJ,iBAAAA,SAAgBoW;;;QAKd,OAAO,IAAImrC,EAA0BC,GAA2BthD;OAGlEF,iBAAAA,SAAQs8B;QACN,OAAOpK,GAAmB/vB,QAAQjC,KAAKuG;;;;;;;;;;;;;;;IAOvCzG,WAA6ByhD;QAA7BzhD;gBACE0B,IAAAA,2BAD2B+/C;;oBAInBzhD,iBAAAA,SACRmzB;QADQnzB,cAGF6yB,IAA4C;QAUlD,OATA3yB,KAAKqgB,GAAQla,SAAQ,SAAC4C,GAAK2K;YACrBA,IACFif,EAAStsB,KACPrG,EAAKuhD,GAAcliB,GAASpM,GAAavf,GAAK1T,EAAK+nB,aAGrD/nB,EAAKuhD,GAAcniB,GAAYr2B;aAG5BipB,GAAmBa,GAAQF;OAG1B7yB,iBAAAA,SACRmzB,GACAitB;QAEA,OAAOlgD,KAAKuhD,GAAcjuB,GAASL,GAAaitB;OAGxCpgD,iBAAAA,SACRmzB,GACAmtB;QAEA,OAAOpgD,KAAKuhD,GAAc/tB,WAAWP,GAAamtB;;;ICvLxDtgD;;;QAGEE,UAGI,IAAIs7B,IAAUvyB,SAAAA;YAAOA,OAAAA,EAAItH;aAK7BzB;;WAgBA+nB;aAWAA;YAKE,OAAO/nB,KAAKwhD;;aAhBdz5B,SAAuBnmB;YAQrB5B,KAAKwhD,KAAY5/C;;;;;;;;;;;IAiBnB9B,iBAAAA,SAASqhD,GAA8Bp5B;QACrC/nB,KAAKyhD,MACLzhD,KAAK+nB,WAAWA,GAChB/nB,KAAKqgB,GAAQ/M,IAAI6tC,EAAcp4C,KAAKo4C;;;;;;;;IAStCrhD,iBAAAA,SAAYiJ,GAAkBgf;QAC5B/nB,KAAKyhD,MACD15B,MACF/nB,KAAK+nB,WAAWA,IAElB/nB,KAAKqgB,GAAQ/M,IAAIvK,GAAK;;;;;;;;;;;;;IAcxBjJ,iBAAAA,SACEmzB,GACAitB;QAEAlgD,KAAKyhD;
QACL,IAAMC,IAAgB1hD,KAAKqgB,GAAQvd,IAAIo9C;QACvC,kBAAIwB,IACK1vB,GAAmB/vB,QAA8By/C,KAEjD1hD,KAAK2hD,GAAa1uB,GAAaitB;;;;;;;;;;;;;IAe1CpgD,yBAAAA,SACEmzB,GACAmtB;QAEA,OAAOpgD,KAAK4hD,GAAgB3uB,GAAamtB;;;;;;IAO3CtgD,oBAAAA,SAAMmzB;QAGJ,OAFAjzB,KAAKyhD,MACLzhD,KAAK6hD,SACE7hD,KAAK22C,GAAa1jB;;sDAIjBnzB,iBAAAA;;;;;;;;;;;;;;;;;;;;;IC9GVA,WAA6Bq7B;QAAAn7B,mBAAAm7B;;;;QAlB7Bn7B,UAAkB,IAAIs7B,IAA8BC,SAAAA;YAAKA,OAAAA,EAAEttB;;;QAGnDjO,iCAA4BsF,EAAgByB;;QAE5C/G,uBAA4B;;QAEpCA,UAAsD;;;;;QAKtDA,UAAqB,IAAIm4C,IAEjBn4C,mBAAc,GAEtBA,UAA4Bw3B,GAAkBsqB;;WAI9ChiD,iBAAAA,SACEs8B,GACAxlB;QAGA,OADA5W,KAAK4kB,GAAQze,SAAQ,SAAC6d,GAAGG;YAAevN,OAAAA,EAAEuN;aACnC6N,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB;QAEA,OAAOjB,GAAmB/vB,QAAQjC,KAAKy/B;OAGzC3/B,iBAAAA,SACEmzB;QAEA,OAAOjB,GAAmB/vB,QAAQjC,KAAK+hD;OAGzCjiD,iBAAAA,SACEmzB;QAGA,OADAjzB,KAAKgiD,kBAAkBhiD,KAAKiiD,GAAkB55C,QACvC2pB,GAAmB/vB,QAAQjC,KAAKgiD;OAGzCliD,iBAAAA,SACEmzB,GACAivB,GACAziB;QAQA,OANIA,MACFz/B,KAAKy/B,4BAA4BA,IAE/ByiB,IAA8BliD,KAAK+hD,OACrC/hD,KAAK+hD,KAAwBG;QAExBlwB,GAAmB/vB;OAGpBnC,iBAAAA,SAAeqkB;QACrBnkB,KAAK4kB,GAAQtR,IAAI6Q,EAAW5J,QAAQ4J;QACpC,IAAM3J,IAAW2J,EAAW3J;QACxBA,IAAWxa,KAAKgiD,oBAClBhiD,KAAKiiD,KAAoB,IAAIzqB,GAAkBhd,IAC/Cxa,KAAKgiD,kBAAkBxnC,IAErB2J,EAAWzJ,iBAAiB1a,KAAK+hD,OACnC/hD,KAAK+hD,KAAwB59B,EAAWzJ;OAI5C5a,iBAAAA,SACEmzB,GACA9O;QAQA,OAFAnkB,KAAKmiD,GAAeh+B,IACpBnkB,KAAKoiD,eAAe,GACbpwB,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB,GACA9O;QAOA,OADAnkB,KAAKmiD,GAAeh+B,IACb6N,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB,GACA9O;QAUA,OAHAnkB,KAAK4kB,GAAQrR,OAAO4Q,EAAW5J,SAC/Bva,KAAK4gD,GAAW3E,GAAsB93B,EAAW3J,WACjDxa,KAAKoiD,eAAe;QACbpwB,GAAmB/vB;OAG5BnC,iBAAAA,SACEmzB,GACAovB,GACAvO;QAHFh0C,cAKMgJ,IAAQ,GACNw5C,IAA4C;QAalD,OAZAtiD,KAAK4kB,GAAQze,SAAQ,SAAC4C,GAAKob;YAEvBA,EAAWzJ,kBAAkB2nC,KACgB,SAA7CvO,EAAgBhxC,IAAIqhB,EAAW3J,cAE/Bxa,EAAK4kB,GAAQrR,OAAOxK,IACpBu5C,EAASj8C,KACPrG,EAAKuiD,GAA8BtvB,GAAa9O,EAAW3J;YAE7D1R;aAGGkpB,GAAmBa,GAAQyvB,GAAUj6C,MAAK;YAAMS,OAAAA;;OAGzDhJ,iBAAAA,SACEmzB;QAEA,OAAOjB,GAAmB/vB,QAAQjC,KAAKoiD;OAGzCtiD,iBAAAA,SACEmzB,GACA1Y;QAEA,IAAM4J,IAAankB,KAAK4kB,GAAQ9hB,IAAIyX,MAAW;QAC/C,OAAOyX,GAAmB/vB,QAAQkiB;OAGpCrkB,iBAAAA,SACEs8B,GACAvuB,GACA2M;QAGA,OADAxa,KAAK4gD,GAAW4B,GAAc30C,GAAM2M,IAC7BwX,GAAmB/vB;OAG5BnC,iBAAAA,SACEs8B,GACAvuB,GACA2M;QAEAxa,KAAK4gD,GAAW6B,GAAiB50C,GAAM2M;QACvC,IAAM8kB,IAAoBt/B,KAAKm7B,YAAYmE,IACrC3M,IAA4C;QAMlD,OALI2M,KACFzxB,EAAK1H,SAAQ4C,SAAAA;YACX4pB,EAAStsB,KAAKi5B,EAAkBuhB,GAAwBzkB,GAAKrzB;aAG1DipB,GAAmBa,GAAQF;OAGpC7yB,iBAAAA,SACEs8B,GACA5hB;QAGA,OADAxa,KAAK4gD,GAAW3E,GAAsBzhC,IAC/BwX,GAAmB/vB;OAG5BnC,iBAAAA,SACEs8B,GACA5hB;QAEA,IAAMkoC,IAAe1iD,KAAK4gD,GAAW+B,GAAgBnoC;QACrD,OAAOwX,GAAmB/vB,QAAQygD;OAGpC5iD,iBAAAA,SACEs8B,GACArzB;QAEA,OAAOipB,GAAmB/vB,QAAQjC,KAAK4gD,GAAW1E,GAAYnzC;;;;;;;;;IC3JhEjJ,WACE8iD;QADF9iD;QAfAE,UAAkE,IAGlEA,UAAkC,IAAI81B,GAAe,IAErD91B,cAaEA,KAAK6iD,SACL7iD,KAAKs/B,KAAoBsjB,EAAyB5iD;QAClDA,KAAK27B,KAAc,IAAImnB,GAAkB9iD,OAGzCA,KAAKgzB,KAAe,IAAI+vB,IACxB/iD,KAAK8yB,KAAsB,IAAIuuB,GAC7BrhD,KAAKgzB,KAJQtf,SAAAA;YACb1T,OAAAA,EAAKs/B,GAAkB0jB,GAAatvC;;;WAQxC5T,oBAAAA;QACE,OAAOkC,QAAQC;OAGjBnC,iBAAAA;;QAGE,OADAE,KAAK6iD,SACE7gD,QAAQC;OAGjBghD;aAAAA;YACE,OAAOjjD,KAAK6iD;;;;QAGd/iD,iBAAAA;;OAIAA,iBAAAA;QACE,OAAOE,KAAKgzB;OAGdlzB,iBAAAA,SAAiB+B;QACf,IAAIm0B,IAAQh2B,KAAKkjD,GAAerhD,EAAKi6C;QAQrC,OAPK9lB,MACHA,IAAQ,IAAImtB,GACVnjD,KAAKgzB,IACLhzB,KAAKs/B,KAEPt/B,KAAKkjD,GAAerhD,EAAKi6C,OAAW9lB,IAE/BA;OAGTl2B,iBAAAA;QACE,OAAOE,KAAK27B;OAGd77B,iBAAAA;QACE,OAAOE,KAAK8yB;OAGdhzB,6BAAAA,SACEmc,GACA2kB,GACAwiB;QAHFtjD;QAOE4lB,GA7FY,qBA6FM,yBAAyBzJ;QAC3C,IAAMmgB,IAAM,IAAIinB,GAAkBrjD,KAAKsjD,GAAej7C;QAEtD,OADArI,KAAK
s/B,GAAkBikB,MAChBH,EAAqBhnB,GACzB/zB,MAAKkG,SAAAA;YACGvO,OAAAA,EAAKs/B,GACTkkB,GAAuBpnB,GACvB/zB,MAAK;gBAAMkG,OAAAA;;YAEfk1C,KACA1gD,MAAKwL,SAAAA;mBACJ6tB,EAAIsnB,MACGn1C;;OAIbzO,iBAAAA,SACEmzB,GACAlqB;QAEA,OAAOipB,GAAmB2xB,GACxB36C,OAAOoD,OAAOpM,KAAKkjD,IAAgBr7C,KAAImuB,SAAAA;YAAS,OAAA;gBAC9CA,OAAAA,EAAMkmB,GAAYjpB,GAAalqB;;;;;IAWrCjJ,WAAqB++B;QAArB/+B;gBACE0B,IAAAA,2BADmBq9B;;;;;;;;;;;;IvBtIvB/+B;QACEE,UAA2D;;WAI3DF,iBAAAA,SAAuBoqC;QACrBlqC,KAAK4jD,GAAqBv9C,KAAK6jC;OAGjCpqC,iBAAAA;QACEE,KAAK4jD,GAAqBz9C,SAAQ+jC,SAAAA;YAAYA,OAAAA;;;;IuB6IhDpqC,WAAqCq7B;QAAAn7B,mBAAAm7B;;QAJrCn7B,UAA4C,IAAIm4C;;QAEhDn4C,UAAsD;;kBAItDF,SAAeq7B;QACb,OAAO,IAAI0oB,EAAoB1oB;OAGjC2oB;aAAAA;YACE,IAAK9jD,KAAK+jD,IAGR,OAAO/jD,KAAK+jD;YAFZ,MAhLqDn+C;;;;QAsLzD9F,iBAAAA,SACEs8B,GACA5hB,GACAzR;QAIA,OAFA/I,KAAKgkD,GAAoBjkB,GAAah3B,GAAKyR,IAC3Cxa,KAAKikD,GAAkB1wC,OAAOxK,IACvBipB,GAAmB/vB;OAG5BnC,iBAAAA,SACEs8B,GACA5hB,GACAzR;QAIA,OAFA/I,KAAKgkD,GAAoBhkB,GAAgBj3B,GAAKyR,IAC9Cxa,KAAKikD,GAAkB1lC,IAAIxV,IACpBipB,GAAmB/vB;OAG5BnC,iBAAAA,SACEs8B,GACArzB;QAGA,OADA/I,KAAKikD,GAAkB1lC,IAAIxV,IACpBipB,GAAmB/vB;OAG5BnC,2BAAAA,SACEs8B,GACAjY;QAFFrkB;QAImBE,KAAKgkD,GAAoB/H,GACxC93B,EAAW3J,UAEJrU,SAAQ4C,SAAAA;YAAO/I,OAAAA,EAAKikD,GAAkB1lC,IAAIxV;;QACnD,IAAMm7C,IAAQlkD,KAAKm7B,YAAYS;QAC/B,OAAOsoB,EACJnjB,GAA2B3E,GAAKjY,EAAW3J,UAC3CnS,MAAKwF,SAAAA;YACJA,EAAK1H,SAAQ4C,SAAAA;gBAAO/I,OAAAA,EAAKikD,GAAkB1lC,IAAIxV;;YAEhDV,MAAK;YAAM67C,OAAAA,EAAMC,GAAiB/nB,GAAKjY;;OAG5CrkB,iBAAAA;QACEE,KAAK+jD,KAAqB,IAAItW;OAGhC3tC,iBAAAA,SACEs8B;QADFt8B,cAKQskD,IADQpkD,KAAKm7B,YAAYO,KACJ8B;;gBAC3B,OAAOxL,GAAmB7rB,QACxBnG,KAAKikD,KACJl7C,SAAAA;YACQ/I,OAAAA,EAAKqkD,GAAajoB,GAAKrzB,GAAKV,MAAKg8C,SAAAA;gBACjCA,KACHD,EAAahlB,GAAYr2B;;YAI/BV,MAAK;mBACLrI,EAAK+jD,KAAqB,MACnBK,EAAan0C,MAAMmsB;;OAI9Bt8B,iBAAAA,SACEs8B,GACArzB;QAFFjJ;QAIE,OAAOE,KAAKqkD,GAAajoB,GAAKrzB,GAAKV,MAAKg8C,SAAAA;YAClCA,IACFrkD,EAAKikD,GAAkB1wC,OAAOxK,KAE9B/I,EAAKikD,GAAkB1lC,IAAIxV;;OAKjCjJ,iBAAAA,SAAa4T;;QAEX,OAAO;OAGD5T,iBAAAA,SACNs8B,GACArzB;QAFMjJ;QAIN,OAAOkyB,GAAmB2xB,GAAG,EAC3B;YACE3xB,OAAAA,GAAmB/vB,QAAQjC,EAAKgkD,GAAoB9H,GAAYnzC;WAClE;YAAM/I,OAAAA,EAAKm7B,YAAYS,KAAiBsgB,GAAY9f,GAAKrzB;WACzD;YAAM/I,OAAAA,EAAKm7B,YAAYmpB,GAAyBloB,GAAKrzB;;;;;oCCjNzDjJ,SAAiBykD;;;;;;2BACfvkD,KAAKg4C,KAAoBh4C,KAAKwkD,GAAwBD,IACtDvkD,KAAKm7B,cAAcn7B,KAAKykD,GAAkBF,oBACpCvkD,KAAKm7B,YAAYhd;;;qCACvBne,KAAK0kD,KAAc1kD,KAAK2kD,GAAiCJ,IACzDvkD,KAAKovC,KAAapvC,KAAK4kD,GAAiBL,IACxCvkD,KAAKm3C,KAAcn3C,KAAK6kD,GAAkBN;oBAC1CvkD,KAAKyxC,KAAazxC,KAAK8kD,GAAiBP,IACxCvkD,KAAK+kD,KAAe/kD,KAAKglD,GAAmBT,IAE5CvkD,KAAKg4C,GAAkBrJ,KAAqB0F,SAAAA;wBAC1Cr0C,OAAAA,EAAKyxC,GAAWiJ,GACdrG;uBAGJr0C,KAAKm3C,GAAY1F,KAAazxC,KAAKyxC,oBAE7BzxC,KAAKovC,GAAWjxB;;;qDAChBne,KAAKg4C,GAAkB75B;;;qDACvBne,KAAKm3C,GAAYh5B;;;qDAEjBne,KAAKm3C,GAAY8N,GAAkBjlD,KAAKyxC,GAAWqH;;;;;;;OAG3Dh5C,iBAAAA,SAAmBykD;QACjB,OAAO,IAAIW,GAAallD,KAAKyxC;OAG/B3xC,iBAAAA,SACEykD;QAEA,OAAO;OAGTzkD,iBAAAA,SAAiBykD;QACf,OAAO,IAAIzlB,GACT9+B,KAAKm7B,aACL,IAAIgqB,IACJZ,EAAIlpB;OAIRv7B,iBAAAA,SAAkBykD;QAKhB,OAAO,IAAIa,GAAkBvB,GAAoBwB;OAGnDvlD,iBAAAA,SAAkBykD;QAAlBzkD;QACE,OAAO,IAAIwlD,GACTtlD,KAAKovC,IACLmV,EAAI/W,IACJ+W,EAAI7sB,KACJ2c,SAAAA;YACEr0C,OAAAA,EAAKyxC,GAAWiJ,GACdrG;YAGJkQ,EAAIv2B,SAASu3B;OAIjBzlD,iBAAAA,SAAwBykD;QACtB,OAAO,IAAIiB;OAGb1lD,iBAAAA,SAAiBykD;QACf,OAAO,IAAIkB,GACTzlD,KAAKovC,IACLpvC,KAAKm3C,IACLn3C,KAAKg4C,IACLuM,EAAIlpB,IACJkpB,EAAItM;OAIRn4C,+BAAAA,SAAiB4lD;QACf,MAAM,IAAItiD,EACRhD,EAAKW,qBA9HT;;;IC+CAjB,WACUkuB,GACA03B,GACA9Z;;;;;;;;;IASAlU;QAXA13B,gBAAAguB,aACA03B,GACA1lD,mBAAA4rC,aASAlU,GAdO13B,gBAAW2lD,GAAOC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;WAqDnC9lD,oBAAAA,SACE+lD,GACAC;QAFFhmD;QAIEE,KAAK+lD;;;;;;;QAQL,IAAMC,IAAqB,IAAInuB,IAQzBouB,IAAoB,IAAIpuB,IAE1BquB;;;;;;;;;QA4BJ,OA3BAlmD,KAAK4rC,YAAYua,GAAkBtkD,SAAAA;YACjC,KAAKqkD,GAKH,OAJAA,QAEAxgC,GAxHQ,mBAwHU,uBAAuB7jB,EAAK9B,MAEvCC,EAAKomD,GACVP,GACAC,GACAjkD,GACAokD,GACAljD,KAAKijD,EAAmB/jD,SAAS+jD,EAAmB/zB;YAEtDjyB,EAAK03B,GAAW8a,IAAiB;gBACxBxyC,OAAAA,EAAKm9C,GAAuBt7C;;;;QAMzC7B,KAAK03B,GAAWe,IAAiB;YACxButB,OAAAA,EAAmBjwB;aAMrBkwB,EAAkBlwB;;+EAI3Bj2B,4BAAAA;QAAAA;QAEE,OADAE,KAAK+lD,MACE/lD,KAAK03B,GAAWwB,SAAQ;YACtBl5B,OAAAA,EAAKyxC,GAAWf;;;;;;;;;;;;;;;;;;;;;;;qBAwBnB5wC,SACN+lD,GACAC,GACAjkD,GACAokD;;;;;;uEAO2BjmD,KAAKguB,SAASq4B,GAAermD,KAAK0lD;;;2BAArD1b,cACAv5B,IAAazQ,KAAKguB,SAASwZ,GAC/BxnC,KAAK0lD,GAAat2C,KAEdo+B,ahB9IVxD,GACA4B,GACAn7B;wBAEA,OAAO,IAAI61C,GAActc,GAAY4B,GAAan7B;sBgB0Ifu5B,GAAYhqC,KAAK4rC,aAAan7B,oBAEvDo1C,EAAkBU,WAAW;wBACjCC,IAAYxmD,KAAK03B;wBACjB+uB,IAAczmD,KAAK0lD;wBACnB13B,UAAUhuB,KAAKguB;wBACf04B,IAAAlZ;wBACAmZ,UAAU3mD,KAAK2mD;wBACfC,IAAa/kD;wBACbglD,IArMiC;wBAsMjCC,IAAAhB;;;;qCAGF9lD,KAAKm7B,cAAc0qB,EAAkB1qB,aACrCn7B,KAAKg4C,KAAoB6N,EAAkB7N,IAC3Ch4C,KAAKovC,KAAayW,EAAkBzW;oBACpCpvC,KAAKm3C,KAAc0O,EAAkB1O,IACrCn3C,KAAKyxC,KAAaoU,EAAkBpU,IACpCzxC,KAAK0kD,KAAcmB,EAAkBnB,IACrC1kD,KAAK+mD,KAAWlB,EAAkBd;;;oBAIlC/kD,KAAKm7B,YAAY6rB,IAA2BvtB;;;;;2DACpCz5B,KAAKinD;;;;;;;yBAGbhB,EAAkBhkD;;;;oBAOlB;;;oBAHAgkD,EAAkBh0B,OAAOnD,KAGpB9uB,KAAKknD,GAAYp4B,IACpB,MAAMA;oBAOR,yBALAq4B,QAAQC,KACN,+EAEEt4B;oBAEG9uB,KAAKomD,GACV,IAAIiB,IACJ;wBAAEC;uBACFzlD,GACAokD;;;;;;;;;;;;IASEnmD,iBAAAA,SAAYgvB;QAClB,OAAmB,oBAAfA,EAAMptB,OAENotB,EAAMxtB,SAASlB,EAAKW,uBACpB+tB,EAAMxtB,SAASlB,EAAKc,kBAGE,sBAAjBqmD,gBACPz4B,aAAiBy4B;;;;QAtPc,OAmQ7Bz4B,EAAMxtB,QApQgB,OAqQtBwtB,EAAMxtB;;;QAtQsB,OAyQ5BwtB,EAAMxtB;;;;;;IAWJxB,iBAAAA;QACN,IAAIE,KAAK03B,GAAW8vB,IAClB,MAAM,IAAIpkD,EACRhD,EAAKW,qBACL;OAKEjB,iBAAAA,SAAuB+B;QAI7B,OAHA7B,KAAK03B,GAAW+vB,MAEhB/hC,GApSY,mBAoSM,uCAAuC7jB,EAAK9B;QACvDC,KAAKyxC,GAAW0L,GAAuBt7C;;kFAIhD/B,6BAAAA;QAAAA;QAEE,OADAE,KAAK+lD,MACE/lD,KAAK03B,GAAWwB,SAAQ;YACtBl5B,OAAAA,EAAKyxC,GAAW2L;;OAI3Bt9C,wBAAAA;QAAAA;QACE,OAAOE,KAAK03B,GAAWgwB,IAA2BjuB;;;;;;+BAE5Cz5B,KAAK0kD,MACP1kD,KAAK0kD,GAAYzT,wBAGbjxC,KAAKm3C,GAAY/F;;;yDACjBpxC,KAAKg4C,GAAkB5G;;;yDACvBpxC,KAAKm7B,YAAYiW;;;;;;;;wBAKvBpxC,KAAK4rC,YAAY+b;;;;;;;;;;;IASrB7nD,mCAAAA;QAAAA;QACEE,KAAK+lD;QAEL,IAAMjuB,IAAW,IAAID;QAIrB,OAHA73B,KAAK03B,GAAWe,IAAiB;YACxBz4B,OAAAA,EAAKyxC,GAAWmW,GAA8B9vB;aAEhDA,EAAS/B;OAGlBj2B,qBAAAA,SACEwgB,GACA89B,GACAloC;QAHFpW;QAKEE,KAAK+lD;QACL,IAAM7b,IAAW,IAAI2d,GAAcvnC,GAAO89B,GAAUloC;QAEpD,OADAlW,KAAK03B,GAAWe,IAAiB;YAAMz4B,OAAAA,EAAK+mD,GAAShO,OAAO7O;aACrDA;OAGTpqC,iBAAAA,SAASoqC;QAATpqC;;;gBAGME,KAAK8nD,MAGT9nD,KAAK03B,GAAWe,IAAiB;YACxBz4B,OAAAA,EAAK+mD,GAASnN,GAAS1P;;wBAIlCpqC,SACE2wB;;;;;;2BAEAzwB,KAAK+lD,MACCjuB,IAAW,IAAID,oBACf73B,KAAK03B,GAAWwB,SAAQO;;;;;;uFAEHz5B,KAAKovC,GAAW2Y,GAAat3B;;;4CAA9C3e,yBACkBC,KACtB+lB,EAAS71B,QAAQ6P,KACRA,aAAoBwC,KAC7BwjB,EAAS71B,QAAQ,QAEjB61B,EAAS7F,OACP,IAAI7uB,EACFhD,EAAKgB,aACL;;;;yDAQAq6C,IAAiBhhB,GACrBzL,GACA,6BAA2ByB;oCAE7BqH,EAAS7F,OAAOwpB;;;;;;;;;;oBAIpB,mCAAO3jB,EAAS/B;;;;wBAGlBj2B,SAAiCwgB;;;;;;2BAC/BtgB,KAAK+lD,MACCjuB,IAAW,IAAID,oBACf73B,KAAK03B,GAAWwB,SAAQO;;;;;;uFAEAz5B,KAAKovC,GAAW4J,GACxC14B;;;;2CADI02B,cAIAe,IAAO,IAAIkB,GAAK34B,GAAO02B,EAAYlW,KACnCoY,IAAiBnB,EAAKd,GAAkBD,EAAYl2B,YACpDgf,IAAaiY,EAAKpB,GACtBuC;qEAGFphB,EAAS71B,QAAQ69B,EAAoBmM;;;yDAE/BwP,IAAiBhhB,GACrBzL,GACA,8BAA4B1O;oCAE9BwX,EAAS7F,OAAOwpB;;;;;;;;;;oBAGpB,mCAAO3jB,EAAS/B;;;;OAGlBj2B,oBAAAA,SAAM0wB;QAAN1wB;QACEE,KAAK+lD;QACL,IAAMjuB,IAAW,IAAID;QAIrB,OAHA73B,KAAK03B,GAAWe,IAAiB
;YAC/Bz4B,OAAAA,EAAKyxC,GAAWzD,MAAMxd,GAAWsH;aAE5BA,EAAS/B;OAGlBj2B,iBAAAA;QACE,OAAOE,KAAK0lD,GAAat2C;OAG3BtP,iBAAAA,SAA2Bs+C;QAA3Bt+C;QACEE,KAAK+lD,MACL/lD,KAAK03B,GAAWe,IAAiB;mBAC/Bz4B,EAAK+mD,GAASiB,GAA2B5J,IAClCp8C,QAAQC;;OAInBnC,iBAAAA,SAA8Bs+C;QAA9Bt+C;;;gBAGME,KAAK8nD,MAGT9nD,KAAK03B,GAAWe,IAAiB;mBAC/Bz4B,EAAK+mD,GAASkB,GAA8B7J,IACrCp8C,QAAQC;;OAInBimD;aAAAA;;;;YAIE,OAAOloD,KAAK03B,GAAW8vB;;;;QAGzB1nD,0BAAAA,SACEs3C;QADFt3C;QAGEE,KAAK+lD;QACL,IAAMjuB,IAAW,IAAID;QAKrB,OAJA73B,KAAK03B,GAAWe,IAAiB;mBAC/Bz4B,EAAKyxC,GAAWtV,eAAen8B,EAAK03B,IAAY0f,GAAgBtf,IACzD91B,QAAQC;aAEV61B,EAAS/B;;;ICzelBj2B,WAAoBs+C;QAAAp+C,gBAAAo+C;;;;;QAFZp+C;;WAIRF,mBAAAA,SAAK8B;QACH5B,KAAKmoD,GAAcnoD,KAAKo+C,SAAS/1C,MAAMzG;OAGzC9B,oBAAAA,SAAMgvB;QACJ9uB,KAAKmoD,GAAcnoD,KAAKo+C,SAAStvB,OAAOA;OAG1ChvB,iBAAAA;QACEE,KAAKooD;OAGCtoD,iBAAAA,SAAiBuoD,GAA+BvN;QAAhDh7C;QACDE,KAAKooD,SACR/vB,YAAW;YACJr4B,EAAKooD,SACRC,EAAavN;YAEd;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;SCfOwN,GAAkBz/C;;;;;IAChC,OAOF,SAA8BA,GAAc0/C;QAC1C,IAAmB,mBAAR1/C,KAA4B,SAARA,GAC7B;QAIF,KADA,IAAM2/C,IAAS3/C,UACM0/C,IAbY,EAAC,QAAQ,SAAS,cAa9BA,cAAAA;YAAhB,IAAME;YACT,IAAIA,KAAUD,KAAoC,qBAAnBA,EAAOC,IACpC;;QAGJ;KAXF,CAP8B5/C;;;;ICa5B/I,WACmB2pC,GACAif,GACAC,GACAC;QAHA5oD,iBAAAypC,GACAzpC,6BAAA0oD,aACAC,GACA3oD,iBAAA4oD;;WAGnB9oD,iBAAAA,SAAa8B;QACX,QAAQoJ,EAAUpJ;UAChB;YACE,OAAO;;UACT;YACE,OAAOA,EAAMuJ;;UACf;YACE,OAAOM,EAAgB7J,EAAMiK,gBAAgBjK,EAAMmK;;UACrD;YACE,OAAO/L,KAAK6oD,GAAiBjnD,EAAqBgJ;;UACpD;YACE,OAAO5K,KAAK8oD,GAAuBlnD;;UACrC;YACE,OAAOA,EAAM2I;;UACf;YACE,OAAO,IAAI26B,GAAK55B,EAAoB1J,EAAiB2J;;UACvD;YACE,OAAOvL,KAAK+oD,GAAiBnnD,EAAqB4J;;UACpD;YACE,OAAOxL,KAAKgpD,GAAgBpnD,EAAoB8J;;UAClD;YACE,OAAO1L,KAAKipD,GAAarnD,EAAiBuK;;UAC5C;YACE,OAAOnM,KAAKkpD,GAActnD,EAAewI;;UAC3C;YACE,MAzDRxE;;OA6DU9F,iBAAAA,SAAcsK;QAAdtK,cACAyO,IAAiC;QAIvC,OAHApI,EAAQiE,EAASC,UAAU,KAAI,SAACtB,GAAKnH;YACnC2M,EAAOxF,KAAO/I,EAAKmpD,GAAavnD;aAE3B2M;OAGDzO,iBAAAA,SAAgB8B;QACtB,OAAO,IAAIynC,GACT59B,EAAgB7J,EAAM+J,WACtBF,EAAgB7J,EAAMgK;OAIlB9L,iBAAAA,SAAaqM;QAAbrM;QACN,QAAQqM,EAAWC,UAAU,IAAIvE,KAAIjG,SAAAA;YAAS5B,OAAAA,EAAKmpD,GAAavnD;;OAG1D9B,iBAAAA,SAAuB8B;QAC7B,QAAQ5B,KAAK2oD;UACX,KAAK;YACH,IAAM/4C,alExBEw5C,EAAiBxnD;gBAC/B,IAAMgO,IAAgBhO,EAAMwI,SAAUC,OAA0BwF;gBAEhE,OAAI1F,EAAkByF,KACbw5C,EAAiBx5C,KAEnBA;ckEkBsChO;YACvC,OAAqB,QAAjBgO,IACK,OAEF5P,KAAKmpD,GAAav5C;;UAC3B,KAAK;YACH,OAAO5P,KAAK6oD,GAAiBr+C,EAAkB5I;;UACjD;YACE,OAAO;;OAIL9B,iBAAAA,SAAiB8B;QACvB,IAAMynD,IAAkB3+C,EAAmB9I,IACrCyD,IAAY,IAAIf,EACpB+kD,EAAgBjlD,SAChBilD,EAAgBx+C;QAElB,OAAI7K,KAAK0oD,wBACArjD,IAEAA,EAAUikD;OAIbxpD,iBAAAA,SAAiB4B;QACvB,IAAM6nD,IAAeriD,EAAaqB,EAAW7G;QAvFrC2B,GAyFN+jB,GAAoBmiC;QAGtB,IAAMn6C,IAAa,IAAI4gB,GAAWu5B,EAAazmD,IAAI,IAAIymD,EAAazmD,IAAI,KAClEiG,IAAM,IAAIT,EAAYihD,EAAa/gD,EAAS;QAclD,OAZK4G,EAAW5J,QAAQxF,KAAKypC,UAAUC;;QAErCxuB,GACE,cAAYnS,qEAEPqG,EAAWC,kBAAaD,EAAWE,qGAEzBtP,KAAKypC,UAAUC,GAAYr6B,kBAAarP,KAAKypC,UAAUC,GAAYp6B;QAK/E,IAAIg6B,GAAkBvgC,GAAK/I,KAAKypC,WAAWzpC,KAAK4oD;;KChD9CY,KAAuB1uB,GAAUI;IA4C5Cp7B,WAAYmnC;;QACV,eAAIA,EAASrX,MAAoB;YAC/B,eAAIqX,EAASpX,KACX,MAAM,IAAIzsB,EACRhD,EAAKI,kBACL;YAGJR,KAAK4vB,OA/DU,4BAgEf5vB,KAAK6vB;eAELuT,GAAkB,YAAY,oBAAoB,QAAQ6D,EAASrX,OACnE5vB,KAAK4vB,OAAOqX,EAASrX,MAErB0T,GAA0B,YAAY,WAAW,OAAO2D,EAASpX;QACjE7vB,KAAK6vB,oBAAMoX,EAASpX;QA0DtB,IAxDA0U,GAAoB,YAAY0C,GAAU,EACxC,QACA,OACA,eACA,yBACA,kBACA,gCACA;QAGF3D,GACE,YACA,UACA,eACA2D,EAAS2E,cAEX5rC,KAAK4rC,cAAc3E,EAAS2E;QAE5BtI,GACE,YACA,WACA,yBACA2D,EAASyhB,wBAGXplB,GACE,YACA,WACA,6BACA2D,EAASX;;;eAKPW,EAASyhB,wBACXxtC,GACE,mGAGO+rB,EAASyhB,yBAClBxtC,GACE;QAIJlb,KAAK0oD,sCACHzhB,EAASyhB;QACX1oD,KAAKsmC
,0CACHW,EAASX;QAEXhD,GACE,YACA,UACA,kBACA2D,EAASwiB,4BAEPxiB,EAASwiB,gBACXzpD,KAAKypD,iBAAiB3uB,GAAUG,SAC3B;YACL,IACEgM,EAASwiB,mBAAmBD,MAC5BviB,EAASwiB,iBAAiB3uB,GAAU4uB,IAEpC,MAAM,IAAItmD,EACRhD,EAAKI,kBACL,qCAAmCs6B,GAAU4uB;YAG/C1pD,KAAKypD,iBAAiBxiB,EAASwiB;;QAInCnmB,GACE,YACA,WACA,gCACA2D,EAAS0iB;QAEX3pD,KAAK8vB,iCACHmX,EAAS0iB;;WAGb7pD,sBAAAA,SAAQkF;QACN,OACEhF,KAAK4vB,SAAS5qB,EAAM4qB,QACpB5vB,KAAK6vB,QAAQ7qB,EAAM6qB,OACnB7vB,KAAK0oD,0BAA0B1jD,EAAM0jD,yBACrC1oD,KAAK4rC,gBAAgB5mC,EAAM4mC,eAC3B5rC,KAAKypD,mBAAmBzkD,EAAMykD,kBAC9BzpD,KAAK8vB,qBAAqB9qB,EAAM8qB,oBAChC9vB,KAAKsmC,8BAA8BthC,EAAMshC;;;;;;IAoC7CxmC,WACE8pD,GACAznD,GACA0jD;QAHF/lD;QAKE,qBAFA+lD,QAA2CwB,KAvB7CrnD,UAAoD;;;QAapDA,UAAkB,IAAI6pD,IAwQtB7pD,gBAAW;YACTuT,QAAQkmB;;;;;;;mCAGNz5B,KAAK8pD,sBACC9pD,KAAK+pD,GAAkB9C;;;;;;;;;;WAjQyB,mBAA5C2C,EAAgC1zC,SAAsB;;;YAGhE,IAAM8zC,IAAMJ;YACZ5pD,KAAKiqD,KAAeD,GACpBhqD,KAAK0pC,KAAcwgB,EAAUC,GAAkBH,IAC/ChqD,KAAKoqD,KAAkBJ,EAAItoD,MAC3B1B,KAAKqqD,KAAe,IAAIC,EAA4BnoD;eAC/C;YACL,IAAMooD,IAAWX;YACjB,KAAKW,EAASl7C,WACZ,MAAM,IAAIjM,EACRhD,EAAKI,kBACL;YAIJR,KAAK0pC,KAAc,IAAI1Z,GAAWu6B,EAASl7C,WAAWk7C,EAASj7C;;YAE/DtP,KAAKoqD,KAAkB,aACvBpqD,KAAKqqD,KAAe,IAAIG;;QAG1BxqD,KAAKyqD,KAAqB5E,GAC1B7lD,KAAK0qD,KAAY,IAAIC,GAAkB;;WAGzCC;aAAAA;YAYE,OAPK5qD,KAAK6qD;;YAER7qD,KAAK6qD,KAAkB,IAAIC,GACzB9qD,KAAK0pC,IACL1pC,KAAK0qD,GAAUpkB,6BAGZtmC,KAAK6qD;;;;QAGd/qD,uBAAAA,SAASirD;QACPtoB,GAA0B,sBAAsBwC,WAAW,IAC3DlC,GAAgB,sBAAsB,UAAU,GAAGgoB;QAEnD,IAAMC,IAAc,IAAIL,GAAkBI;QAC1C,IAAI/qD,KAAK+pD,OAAqB/pD,KAAK0qD,GAAUllD,QAAQwlD,IACnD,MAAM,IAAI5nD,EACRhD,EAAKW,qBACL;QAMJf,KAAK0qD,KAAYM,cACbA,EAAYpf,gBACd5rC,KAAKqqD,c3EdTze;YAEA,KAAKA,GACH,OAAO,IAAI4e;YAGb,QAAQ5e,EAAYxrB;cAClB,KAAK;gBACH,IAAM6qC,IAASrf,EAAYqf;;gCAW3B,OATA5nD,KAEsB,mBAAX4nD,KACI,SAAXA,MACAA,EAAaxoD,SACbwoD,EAAaxoD,KAAmCyoD;gBAI7C,IAAIC,EACTF,GACArf,EAAYhoC,KAAgB;;cAGhC,KAAK;gBACH,OAAOgoC,EAAYqf;;cAErB;gBACE,MAAM,IAAI7nD,EACRhD,EAAKI,kBACL;;U2EhB0CwqD,EAAYpf;OAI5D9rC,4BAAAA;QAEE,OADAE,KAAK8pD,MACE9pD,KAAK+pD,GAAkBrZ;OAGhC5wC,6BAAAA;QAEE,OADAE,KAAK8pD,MACE9pD,KAAK+pD,GAAkB3M;OAGhCt9C,gCAAAA,SAAkBmnC;;QAChB,IAAIjnC,KAAK+pD,IACP,MAAM,IAAI3mD,EACRhD,EAAKW,qBACL;QAMJ,IAAIqqD;QAcJ,OAZInkB,iBACEA,EAASokB,kCACXnwC,GACE;QAGJkwC,gCACEnkB,EAASmkB,uCACTnkB,EAASokB;QAINrrD,KAAKsrD,GAAgBtrD,KAAKyqD,IAAoB;YACnDnD;YACAmC,gBAAgBzpD,KAAK0qD,GAAUjB;YAC/B2B,iBAAAA;;sCAIJtrD;;;;gBACE,eACEE,KAAK+pD,OACJ/pD,KAAK+pD,GAAiBjC,IAEvB,MAAM,IAAI1kD,EACRhD,EAAKW,qBACL;gBAcJ,OAVM+2B,IAAW,IAAID,sBACrB73B,KAAKurD,GAAOC,IAAkC/xB;;;;;;mEAEpCisB,IAAe1lD,KAAKyrD,sBACpBzrD,KAAKyqD,GAAmBiB,iBAAiBhG;;;iDAC/C5tB,EAAS71B;;;qDAET61B,EAAS7F,OAAOjD;;;;;;;qBAGb8I,EAAS/B;;;OAGlBj2B,wBAAAA;QAEE,OADCE,KAAKgqD,IAAqB2B,uBAAuB,cAC3C3rD,KAAKmB,SAASoS;OAGvBq4C;aAAAA;YAEE,OADA5rD,KAAK8pD,MACE9pD,KAAK+pD,GAAkBjC;;;;QAGhChoD,mCAAAA;QAEE,OADAE,KAAK8pD,MACE9pD,KAAK+pD,GAAkB8B;OAKhC/rD,gCAAAA,SAAkBgsD;QAGhB,IAFA9rD,KAAK8pD,MAEDxB,GAAkBwD,IACpB,OAAO9rD,KAAK+rD,GAA0BD;QAEtC/oB,GAAgB,+BAA+B,YAAY,GAAG+oB;QAC9D,IAAM1N,IAAkC;YACtC/1C,MAAMyjD;;QAER,OAAO9rD,KAAK+rD,GAA0B3N;OAIlCt+C,iBAAAA,SACNs+C;QADMt+C,cAMAksD,IAAgB,IAAIC,GAAoB;YAC5C5jD,MAAM;gBACA+1C,EAAS/1C,QACX+1C,EAAS/1C;;YAGbymB,OATkBwD,SAAAA;gBAClB,MArbsB1sB;;;QAgcxB,OADA5F,KAAK+pD,GAAkB/B,GAA2BgE,IAC3C;YACLA,EAAcE,MACdlsD,EAAK+pD,GAAkB9B,GAA8B+D;;OAIzDlsD,iBAAAA;QAQE,OAPKE,KAAK+pD;;;QAGR/pD,KAAKsrD,GAAgB,IAAIjE,IAA2B;YAClDC;YAGGtnD,KAAK+pD;OAGNjqD,iBAAAA;QACN,OAAO,IAAIqsD,GACTnsD,KAAK0pC,IACL1pC,KAAKoqD,IACLpqD,KAAK0qD,GAAU96B,MACf5vB,KAAK0qD,GAAU76B,KACf7vB,KAAK0qD,GAAU56B;OAIXhwB,iBAAAA,SACN+lD,GACAC;QASA,IAAMJ,IAAe1lD,KAAKyrD;QAS1B,OAPAzrD,KAAK+pD,KAAmB,IAAIqC,
GAC1B5iD,GAAgBC,MAChBi8C,GACA1lD,KAAKqqD,IACLrqD,KAAKurD,KAGAvrD,KAAK+pD,GAAiB5rC,MAAM0nC,GAAmBC;cAGhDhmD,SAAyBkqD;QAC/B,IA6+DcnhD,IA7+DAmhD,EAAI9zC,SAAS,cA8+DtBlN,OAAOC,UAAUC,eAAeC,KAAKN,GA9+Df,cACzB,MAAM,IAAIzF,EACRhD,EAAKI,kBACL;QA0+DR,IAAkBqI,GAt+DRwG,IAAY26C,EAAI9zC,QAAQ7G;;;;;;;;;;;;;;;;;;;;;;;;;;;WAC9B,KAAKA,KAAkC,mBAAdA,GACvB,MAAM,IAAIjM,EACRhD,EAAKI,kBACL;QAGJ,OAAO,IAAIwvB,GAAW3gB;OAGxB26C;aAAAA;YACE,KAAKhqD,KAAKiqD,IACR,MAAM,IAAI7mD,EACRhD,EAAKW,qBACL;YAIJ,OAAOf,KAAKiqD;;;;QAYdnqD,yBAAAA,SAAWusD;QAIT,OAHA5pB,GAA0B,wBAAwBwC,WAAW,IAC7DlC,GAAgB,wBAAwB,oBAAoB,GAAGspB;QAC/DrsD,KAAK8pD,MACE,IAAIwC,GAAoBplD,EAAaqB,EAAW8jD,IAAarsD;OAGtEF,kBAAAA,SAAIusD;QAIF,OAHA5pB,GAA0B,iBAAiBwC,WAAW,IACtDlC,GAAgB,iBAAiB,oBAAoB,GAAGspB;QACxDrsD,KAAK8pD,MACExgB,GAAkBijB,GAAQrlD,EAAaqB,EAAW8jD,IAAarsD;OAGxEF,8BAAAA,SAAgB2I;QAQd,IAPAg6B,GAA0B,6BAA6BwC,WAAW,IAClElC,GACE,6BACA,oBACA,GACAt6B;QAEEA,EAAalB,QAAQ,QAAQ,GAC/B,MAAM,IAAInE,EACRhD,EAAKI,kBACL,4BAA0BiI;QAK9B,OADAzI,KAAK8pD,MACE,IAAI7yC,GACT,IAAIu1C,GAActlD,EAAakO,GAAY3M,IAC3CzI;OAIJF,6BAAAA,SACEs3C;QADFt3C;QAKE,OAFA2iC,GAA0B,4BAA4BwC,WAAW,IACjElC,GAAgB,4BAA4B,YAAY,GAAGqU;QACpDp3C,KAAK8pD,KAAyB72B,aAClCA,SAAAA;YACQmkB,OAAAA,EAAe,IAAIvD,GAAY7zC,GAAMizB;;OAKlDnzB,oBAAAA;QAGE,OAFAE,KAAK8pD,MAEE,IAAI2C,GAAWzsD;OAGxBouB;aAAAA;YACE,QAAQD;cACN,KAAKK,EAASC;gBACZ,OAAO;;cACT,KAAKD,EAASk+B;gBACZ,OAAO;;cACT;;gBAEE,OAAO;;;;;wBAIb5sD,SAAmB6sD;QAGjB,QAFAlqB,GAA0B,yBAAyBwC,WAAW,IAC9DlC,GAAgB,yBAAyB,oBAAoB,GAAG4pB;QACxDA;UACN,KAAK;YACHt+B,GAAYG,EAASC;YACrB;;UACF,KAAK;YACHJ,GAAYG,EAASK;YACrB;;UACF,KAAK;YACHR,GAAYG,EAASk+B;YACrB;;UACF;YACE,MAAM,IAAItpD,EACRhD,EAAKI,kBACL,wBAAwBmsD;;;;;IAOhC7sD,iBAAAA;QACE,OAAOE,KAAK0qD,GAAUhC;;;IAQxB5oD,WACU8sD,GACAC;kBADAD,aACAC;;WAGV/sD,kBAAAA,SACEgtD;QADFhtD;QAGE2iC,GAA0B,mBAAmBwC,WAAW;QACxD,IAAMrD,IAAMmrB,GACV,mBACAD,GACA9sD,KAAK4sD;QAEP,OAAO5sD,KAAK6sD,GACTG,GAAO,EAACprB,EAAI+H,MACZ5mC,MAAMwd,SAAAA;YACL,KAAKA,KAAwB,MAAhBA,EAAK5a,QAChB,OA5oBkBC;YA8oBpB,IAAM8N,IAAM6M,EAAK;YACjB,IAAI7M,aAAeY,IACjB,OAAO,IAAI24C,GACTjtD,EAAK4sD,IACLhrB,EAAI+H,IACJ;;wCAGA/H,EAAIsrB;YAED,IAAIx5C,aAAe3B,IACxB,OAAO,IAAIk7C,GACTjtD,EAAK4sD,IACLhrB,EAAI+H,IACJj2B;;wCAGAkuB,EAAIsrB;YAGN,MAlqBkBtnD;;OAyqB1B9F,kBAAAA,SACEgtD,GACAlrD,GACAsU;QAEA2sB,GAA4B,mBAAmBoC,WAAW,GAAG;QAC7D,IAAMrD,IAAMmrB,GACV,mBACAD,GACA9sD,KAAK4sD;QAEP12C,IAAUi3C,GAAmB,mBAAmBj3C;gDACzCk3C,UAAgB7qB,UAKjB8qB,IACJn3C,EAAQo3C,SAASp3C,EAAQq3C,cACrBvtD,KAAK4sD,GAAWY,GAAYC,GAC1BlrB,GACA6qB,GACAl3C,EAAQq3C,eAEVvtD,KAAK4sD,GAAWY,GAAYE,GAC1BnrB,GACA6qB;QAGR,OADAptD,KAAK6sD,GAAav5C,IAAIsuB,EAAI+H,IAAM0jB,IACzBrtD;OAaTF,qBAAAA,SACEgtD,GACAa,GACA/rD;iBAGIggC,GACAyrB;QAgCJ,OA7B+B,mBAAtBM,KACPA,aAA6B3lB,MAE7BrF,GAA4B,sBAAsBsC,WAAW;QAC7DrD,IAAMmrB,GACJ,sBACAD,GACA9sD,KAAK4sD,KAEPS,IAASrtD,KAAK4sD,GAAWY,GAAYI,GACnC,sBACAD,GACA/rD,GACA+mC,OAGFlG,GAA0B,sBAAsBwC,WAAW;QAC3DrD,IAAMmrB,GACJ,sBACAD,GACA9sD,KAAK4sD,KAEPS,IAASrtD,KAAK4sD,GAAWY,GAAYK,GACnC,sBACAF;QAIJ3tD,KAAK6sD,GAAa3jC,OAAO0Y,EAAI+H,IAAM0jB,IAC5BrtD;OAGTF,qBAAAA,SAAOgtD;QACLrqB,GAA0B,sBAAsBwC,WAAW;QAC3D,IAAMrD,IAAMmrB,GACV,sBACAD,GACA9sD,KAAK4sD;QAGP,OADA5sD,KAAK6sD,GAAat5C,OAAOquB,EAAI+H,KACtB3pC;;;IAQTF,WAAoB8sD;kBAAAA,GAHpB5sD,UAAqB,IACrBA;;WAIAF,kBAAAA,SACEgtD,GACAlrD,GACAsU;QAEA2sB,GAA4B,kBAAkBoC,WAAW,GAAG,IAC5DjlC,KAAK8tD;QACL,IAAMlsB,IAAMmrB,GACV,kBACAD,GACA9sD,KAAK4sD;QAEP12C,IAAUi3C,GAAmB,kBAAkBj3C;+CACxCk3C,UAAgB7qB,UAKjB8qB,IACJn3C,EAAQo3C,SAASp3C,EAAQq3C,cACrBvtD,KAAK4sD,GAAWY,GAAYC,GAC1BlrB,GACA6qB,GACAl3C,EAAQq3C,eAEVvtD,KAAK4sD,GAAWY,GAAYE,GAC1BnrB,GACA6qB;QAKR,OAHAptD,KAAK+tD,KAAa/tD,KAAK+tD,GAAWj2C,OAChCu1C,EAAOpf,GAAYrM,EAAI+H,IAAM/3
B,GAAagY,QAErC5pB;OAaTF,qBAAAA,SACEgtD,GACAa,GACA/rD;iBAKIggC,GACAyrB;QAkCJ,OArCArtD,KAAK8tD,MAM0B,mBAAtBH,KACPA,aAA6B3lB,MAE7BrF,GAA4B,qBAAqBsC,WAAW;QAC5DrD,IAAMmrB,GACJ,qBACAD,GACA9sD,KAAK4sD,KAEPS,IAASrtD,KAAK4sD,GAAWY,GAAYI,GACnC,qBACAD,GACA/rD,GACA+mC,OAGFlG,GAA0B,qBAAqBwC,WAAW;QAC1DrD,IAAMmrB,GACJ,qBACAD,GACA9sD,KAAK4sD,KAEPS,IAASrtD,KAAK4sD,GAAWY,GAAYK,GACnC,qBACAF;QAIJ3tD,KAAK+tD,KAAa/tD,KAAK+tD,GAAWj2C,OAChCu1C,EAAOpf,GAAYrM,EAAI+H,IAAM/3B,GAAaD,cAErC3R;OAGTF,qBAAAA,SAAOgtD;QACLrqB,GAA0B,qBAAqBwC,WAAW,IAC1DjlC,KAAK8tD;QACL,IAAMlsB,IAAMmrB,GACV,qBACAD,GACA9sD,KAAK4sD;QAKP,OAHA5sD,KAAK+tD,KAAa/tD,KAAK+tD,GAAWj2C,OAChC,IAAIvD,GAAeqtB,EAAI+H,IAAM/3B,GAAagY,QAErC5pB;OAGTF,qBAAAA;QAGE,OAFAE,KAAK8tD,MACL9tD,KAAKguD,SACDhuD,KAAK+tD,GAAWpoD,SAAS,IACpB3F,KAAK4sD,GAAW9C,KAAyB9b,MAAMhuC,KAAK+tD,MAGtD/rD,QAAQC;OAGTnC,iBAAAA;QACN,IAAIE,KAAKguD,IACP,MAAM,IAAI5qD,EACRhD,EAAKW,qBACL;;;IAcNjB,WACS6pC,GACEF,GACAyjB;kBAFFvjB,GACE3pC,iBAAAypC,aACAyjB,GAETltD,KAAK+pD,KAAmB/pD,KAAKypC,UAAUqgB;;kBAGzChqD,SACEwH,GACAmiC,GACAmf;QAEA,IAAIthD,EAAK3B,SAAS,KAAM,GACtB,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,+FAEK8G,EAAKD,gBAAyBC,EAAK3B;QAG5C,OAAO,IAAI2jC,EAAkB,IAAIhhC,EAAYhB,IAAOmiC,GAAWmf;OAGjE14B;aAAAA;YACE,OAAOlwB,KAAK2pC,GAAKriC,KAAKoN;;;;QAGxBsW;aAAAA;YACE,OAAO,IAAIshC,GACTtsD,KAAK2pC,GAAKriC,KAAK6jB,KACfnrB,KAAKypC,WACLzpC,KAAKktD;;;;QAIT5lD;aAAAA;YACE,OAAOtH,KAAK2pC,GAAKriC,KAAKD;;;;QAGxBvH,yBAAAA,SACEusD;QASA,IAPA5pB,GAA0B,gCAAgCwC,WAAW,IACrElC,GACE,gCACA,oBACA,GACAspB;SAEGA,GACH,MAAM,IAAIjpD,EACRhD,EAAKI,kBACL;QAGJ,IAAM8G,IAAOJ,EAAaqB,EAAW8jD;QACrC,OAAO,IAAIC,GAAoBtsD,KAAK2pC,GAAKriC,KAAKqO,MAAMrO,IAAOtH,KAAKypC;OAGlE3pC,sBAAAA,SAAQkF;QACN,MAAMA,aAAiBskC,IACrB,MAAM7E,GAAkB,WAAW,qBAAqB,GAAGz/B;QAE7D,OACEhF,KAAKypC,cAAczkC,EAAMykC,aACzBzpC,KAAK2pC,GAAKnkC,QAAQR,EAAM2kC,OACxB3pC,KAAKktD,OAAeloD,EAAMkoD;OAQ9BptD,kBAAAA,SAAI8B,GAAUsU;QACZ2sB,GAA4B,yBAAyBoC,WAAW,GAAG,IACnE/uB,IAAUi3C,GAAmB,yBAAyBj3C;yDAC/Ck3C,UAAgB7qB,UAKjB8qB,IACJn3C,EAAQo3C,SAASp3C,EAAQq3C,cACrBvtD,KAAKypC,UAAU+jB,GAAYC,GACzBlrB,GACA6qB,GACAl3C,EAAQq3C,eAEVvtD,KAAKypC,UAAU+jB,GAAYE,GAAanrB,GAAc6qB;QAC5D,OAAOptD,KAAK+pD,GAAiB/b,MAC3Bqf,EAAOpf,GAAYjuC,KAAK2pC,IAAM/3B,GAAagY;OAU/C9pB,qBAAAA,SACE6tD,GACA/rD;iBAGIyrD;QAqBJ,OAlB+B,mBAAtBM,KACPA,aAA6B3lB,MAE7BrF,GAA4B,4BAA4BsC,WAAW;QACnEooB,IAASrtD,KAAKypC,UAAU+jB,GAAYI,GAClC,4BACAD,GACA/rD,GACA+mC,OAGFlG,GAA0B,4BAA4BwC,WAAW;QACjEooB,IAASrtD,KAAKypC,UAAU+jB,GAAYK,GAClC,4BACAF,KAIG3tD,KAAK+pD,GAAiB/b,MAC3Bqf,EAAOpf,GAAYjuC,KAAK2pC,IAAM/3B,GAAaD;OAI/C7R,qBAAAA;QAEE,OADA2iC,GAA0B,4BAA4BwC,WAAW,IAC1DjlC,KAAK+pD,GAAiB/b,MAAM,EACjC,IAAIz5B,GAAevU,KAAK2pC,IAAM/3B,GAAagY;OAuB/C9pB,yBAAAA;;QACE+iC,GACE,gCACAoC,WACA,GACA;QAEF,IAGImZ,GAHAloC,IAA2C;YAC7CsoC;WAGEyP,IAAU;QAEa,mBAAlBv/B,EAAKu/B,MACX3F,GAAkB55B,EAAKu/B,QAGxB1pB,GAAoB,gCADpBruB,IAAUwY,EAAKu/B,IAC8C,EAC3D;QAEF3qB,GACE,gCACA,WACA,0BACAptB,EAAQsoC;QAEVyP;QAGF,IAAMC,IAAkB;YACtB1P,wBAAwBtoC,EAAQsoC;;QAgClC,OA7BI8J,GAAkB55B,EAAKu/B,MACzB7P,IAAW1vB,EAAKu/B,MAIhBlrB,GACE,gCACA,YACAkrB,GACAv/B,EAAKu/B;QAEP9qB,GACE,gCACA,YACA8qB,IAAU,GACVv/B,EAAKu/B,IAAU,KAEjB9qB,GACE,gCACA,YACA8qB,IAAU,GACVv/B,EAAKu/B,IAAU;QAEjB7P,IAAW;YACT/1C,MAAMqmB,EAAKu/B;YACXn/B,OAAOJ,EAAKu/B,IAAU;YACtBE,UAAUz/B,EAAKu/B,IAAU;YAGtBjuD,KAAKouD,GAAmBF,GAAiB9P;OAG1Ct+C,iBAAAA,SACNoW,GACAkoC;QAFMt+C,cAIFuuD,IAAc/7B,SAAAA;YAChB60B,QAAQr4B,MAAM,iCAAiCwD;;QAE7C8rB,EAAStvB,UACXu/B,IAAajQ,EAAStvB,MAAMiJ,KAAKqmB;QAGnC,IAAM4N,IAAgB,IAAIC,GAA4B;YACpD5jD,MAAM4jC,SAAAA;gBACJ,IAAImS,EAAS/1C,MAAM;oBAKjB,IAAMqL,IAAMu4B,EAAS1rB,KAAKzd,IAAI9C,EAAK2pC;oBAEnCyU,EAAS/1C,KACP,IAAI4kD,GACFjtD,EAAKypC,WACLzpC,EAAK2pC,I
ACLj2B,GACAu4B,EAAStrB,WACTsrB,EAAS51B,kBACTrW,EAAKktD;;;YAKbp+B,OAAOu/B;YAEHC,IAAmBtuD,KAAK+pD,GAAiBhR,OAC7CyT,GAAc3hC,GAAO7qB,KAAK2pC,GAAKriC,OAC/B0kD,GACA91C;QAGF,OAAO;YACL81C,EAAcE,MACdlsD,EAAK+pD,GAAiBnQ,GAAS0U;;OAInCxuD,kBAAAA,SAAIoW;QAAJpW;QAGE,OAFA+iC,GAA4B,yBAAyBoC,WAAW,GAAG,IACnEspB,GAAmB,yBAAyBr4C;QACrC,IAAIlU,SACT,SAACC,GAAkDgwB;YAC7C/b,KAA8B,YAAnBA,EAAQskC,SACrBx6C,EAAKypC,UACFqgB,KACA0E,GAA0BxuD,EAAK2pC,IAC/B5mC,MAAK2Q,SAAAA;gBACJzR,EACE,IAAIgrD,GACFjtD,EAAKypC,WACLzpC,EAAK2pC,IACLj2B;mCAEAA,aAAe3B,MAAW2B,EAAIyC,IAC9BnW,EAAKktD;gBAGRj7B,KAELjyB,EAAKyuD,GAAuBxsD,GAASgwB,GAAQ/b;;OAM7CpW,iBAAAA,SACNmC,GACAgwB,GACA/b;QAEA,IAAM0jC,IAAW55C,KAAKouD,GACpB;YACE5P;YACAkQ;WAEF;YACErmD,MAAOk2C,SAAAA;;;gBAGL3E,MAEK2E,EAAK5sC,UAAU4sC,EAAKoQ,SAAShuC;;;;;;;;gBAQhCsR,EACE,IAAI7uB,EACFhD,EAAKgB,aACL,4DAIJm9C,EAAK5sC,UACL4sC,EAAKoQ,SAAShuC,aACdzK,KACmB,aAAnBA,EAAQskC,SAERvoB,EACE,IAAI7uB,EACFhD,EAAKgB,aACL,gLAOJa,EAAQs8C;;YAGZzvB,OAAOmD;;OAKbnyB,4BAAAA,SACE8oD;QAEA,OAAO,IAAItf,EAAqBtpC,KAAK2pC,IAAM3pC,KAAKypC,WAAWmf;;;IAK7D9oD,WACWuW,GACAsK;QADA3gB,wBAAAqW,GACArW,iBAAA2gB;;WAGX7gB,sBAAAA,SAAQkF;QACN,OACEhF,KAAKqW,qBAAqBrR,EAAMqR,oBAChCrW,KAAK2gB,cAAc3b,EAAM2b;;;IAa7B7gB,WACU8sD,GACAjjB,GACDilB,GACCC,GACAC,GACS5B;kBALTN,aACAjjB,aACDilB,aACCC,aACAC,aACS5B;;WAGnBptD,mBAAAA,SAAKoW;QAGH,IAFA2sB,GAA4B,yBAAyBoC,WAAW,GAAG,IACnE/uB,IAAU64C,GAAwB,yBAAyB74C;QACtDlW,KAAK4uD,IAEH;;;YAGL,IAAI5uD,KAAKktD,IAAY;gBACnB,IAAMjhB,IAAW,IAAI+iB,GACnBhvD,KAAK4sD,IACL5sD,KAAK2pC,IACL3pC,KAAK4uD,IACL5uD,KAAK6uD,IACL7uD,KAAK8uD;gBAEP,OAAO9uD,KAAKktD,GAAW+B,cAAchjB,GAAU/1B;;YAQ/C,OANuB,IAAIg5C,GACzBlvD,KAAK4sD,IACL5sD,KAAK4sD,GAAWuC,MAChBj5C,EAAQk5C;qCAGYjG,GAAanpD,KAAK4uD,GAAUjnC;;OAKxD7nB,kBAAAA,SACEoR,GACAgF;QAIA,IAFA2sB,GAA4B,wBAAwBoC,WAAW,GAAG,IAClE/uB,IAAU64C,GAAwB,wBAAwB74C;QACtDlW,KAAK4uD,IAAW;YAClB,IAAMhtD,IAAQ5B,KAAK4uD,GAChBl8C,OACApB,MAAMs3B,GAAsB,wBAAwB13B;YACvD,IAAc,SAAVtP,GAOF,OANuB,IAAIstD,GACzBlvD,KAAK4sD,IACL5sD,KAAK4sD,GAAWuC,MAChBj5C,EAAQk5C,kBACRpvD,KAAKktD,IAEe/D,GAAavnD;;OAMzCsuB;aAAAA;YACE,OAAOlwB,KAAK2pC,GAAKriC,KAAKoN;;;;QAGxBktB;aAAAA;YACE,OAAO,IAAI0H,GACTtpC,KAAK2pC,IACL3pC,KAAK4sD,IACL5sD,KAAKktD;;;;QAITv7C;aAAAA;YACE,OAA0B,SAAnB3R,KAAK4uD;;;;QAGdD;aAAAA;YACE,OAAO,IAAIU,GAAiBrvD,KAAK8uD,IAAmB9uD,KAAK6uD;;;;QAG3D/uD,sBAAAA,SAAQkF;QACN,MAAMA,aAAiBioD,IACrB,MAAMxoB,GAAkB,WAAW,oBAAoB,GAAGz/B;QAE5D,OACEhF,KAAK4sD,OAAe5nD,EAAM4nD,MAC1B5sD,KAAK6uD,OAAe7pD,EAAM6pD,MAC1B7uD,KAAK2pC,GAAKnkC,QAAQR,EAAM2kC,QACJ,SAAnB3pC,KAAK4uD,KACkB,SAApB5pD,EAAM4pD,KACN5uD,KAAK4uD,GAAUppD,QAAQR,EAAM4pD,QACjC5uD,KAAKktD,OAAeloD,EAAMkoD;;;;;;WAMtBD,SAERntD,mBAAAA,SAAKoW;QAMH,OALa1U,YAAMkR,gBAAKwD;;EAHlB+2C;IAaRntD,WACSwvD,GACE7lB,GACUyjB;kBAFZoC,GACEtvD,iBAAAypC,aACUyjB;;WAGrBptD,oBAAAA,SACEwR,GACAi+C,GACA3tD;QAEA6gC,GAA0B,eAAewC,WAAW,IACpDX,GAAgB,eAAe,GAAG1iC;;QAGlC,IAYI4tD,GAFE32C,a3BloCR0pB,GACAktB,GACAt2C,GACA6pB;YAEA,KAAKysB,EAAMr/C,MAAKC,SAAAA;gBAAWA,OAAAA,MAAY2yB;iBACrC,MAAM,IAAI5/B,EACRhD,EAAKI,kBACL,mBAAiBojC,GAAiBZ,sDACHE,G2BynCgC,wC3BxnClDusB,EAAMroD,KAAK;YAG5B,OAAO47B;U2BqnCyB,GAVH,gPAUsC,GAAGusB,IAG9Dr+C,IAAY03B,GAAsB,eAAet3B;QACvD,IAAIJ,EAAUqG,KAAc;YAC1B,8CACEsB,uDACAA,GAEA,MAAM,IAAIzV,EACRhD,EAAKI,kBACL,uCAAqCqY;YAGlC,sBAAIA,GAAoB;gBAC7B7Y,KAAK0vD,GAAkC9tD,GAAOiX;gBAE9C,KADA,IAAM82C,IAA6B,WACV/tD,OAAAA,cAAAA;oBAApB,IAAMuK;oBACTwjD,EAActpD,KAAKrG,KAAK4vD,GAAqBzjD;;gBAE/CqjD,IAAa;oBAAErjD,YAAY;wBAAEC,QAAQujD;;;mBAErCH,IAAaxvD,KAAK4vD,GAAqBhuD;iCAGrCiX,uDAAsBA,KACxB7Y,KAAK0vD,GAAkC9tD,GAAOiX;QAEhD22C,IAAaxvD,KAAKypC,UAAU+jB,GAAYqC,GACtC,eACAjuD;;+CAEqBiX;QAGzB,IAAMpR,IAASiR,GAAY4U,OAAOpc,GAAW2H,GAAI22C;QAEjD,OA
DAxvD,KAAK8vD,GAAkBroD,IAChB,IAAIwP,EACTjX,KAAKsvD,GAAOS,GAAUtoD,IACtBzH,KAAKypC,WACLzpC,KAAKktD;OAITptD,sBAAAA,SACEwR,GACA0+C;QASA,IAAI9iC;QACJ,IARA2V,GAA4B,iBAAiBoC,WAAW,GAAG,IAC3D9B,GACE,iBACA,oBACA,GACA6sB;mBAGEA,KAA+C,UAAjBA,GAChC9iC,gCACK;YAAA,IAAqB,WAAjB8iC,GAGT,MAAM,IAAI5sD,EACRhD,EAAKI,kBACL,qDAAmDwvD;YAJrD9iC;;QAQF,IAA4B,SAAxBltB,KAAKsvD,GAAO74C,SACd,MAAM,IAAIrT,EACRhD,EAAKI,kBACL;QAIJ,IAA0B,SAAtBR,KAAKsvD,GAAO54C,OACd,MAAM,IAAItT,EACRhD,EAAKI,kBACL;QAIJ,IAAM0Q,IAAY03B,GAAsB,iBAAiBt3B,IACnDiF,IAAU,IAAIiB,GAAQtG,GAAWgc;QAEvC,OADAltB,KAAKiwD,GAAmB15C,IACjB,IAAIU,EACTjX,KAAKsvD,GAAOY,GAAW35C,IACvBvW,KAAKypC,WACLzpC,KAAKktD;OAITptD,oBAAAA,SAAM+c;QAIJ,OAHA4lB,GAA0B,eAAewC,WAAW,IACpDlC,GAAgB,eAAe,UAAU,GAAGlmB,IAC5C6nB,GAAuB,eAAe,GAAG7nB;QAClC,IAAI5F,EACTjX,KAAKsvD,GAAOa,GAAiBtzC,IAC7B7c,KAAKypC,WACLzpC,KAAKktD;OAITptD,0BAAAA,SAAY+c;QAIV,OAHA4lB,GAA0B,qBAAqBwC,WAAW,IAC1DlC,GAAgB,qBAAqB,UAAU,GAAGlmB;QAClD6nB,GAAuB,qBAAqB,GAAG7nB,IACxC,IAAI5F,EACTjX,KAAKsvD,GAAOc,GAAgBvzC,IAC5B7c,KAAKypC,WACLzpC,KAAKktD;OAITptD,sBAAAA,SACEuwD;;QAGA1tB,GAA4B,iBAAiBsC,WAAW;QACxD,IAAMjtB,IAAQhY,KAAKswD,GACjB,iBACAD,GACAhmD;;QAGF,OAAO,IAAI4M,EACTjX,KAAKsvD,GAAOiB,GAAYv4C,IACxBhY,KAAKypC,WACLzpC,KAAKktD;OAITptD,yBAAAA,SACEuwD;;QAGA1tB,GAA4B,oBAAoBsC,WAAW;QAC3D,IAAMjtB,IAAQhY,KAAKswD,GACjB,oBACAD,GACAhmD;;QAGF,OAAO,IAAI4M,EACTjX,KAAKsvD,GAAOiB,GAAYv4C,IACxBhY,KAAKypC,WACLzpC,KAAKktD;OAITptD,wBAAAA,SACEuwD;;QAGA1tB,GAA4B,mBAAmBsC,WAAW;QAC1D,IAAMjtB,IAAQhY,KAAKswD,GACjB,mBACAD,GACAhmD;;QAGF,OAAO,IAAI4M,EACTjX,KAAKsvD,GAAOkB,GAAUx4C,IACtBhY,KAAKypC,WACLzpC,KAAKktD;OAITptD,oBAAAA,SACEuwD;;QAGA1tB,GAA4B,eAAesC,WAAW;QACtD,IAAMjtB,IAAQhY,KAAKswD,GACjB,eACAD,GACAhmD;;QAGF,OAAO,IAAI4M,EACTjX,KAAKsvD,GAAOkB,GAAUx4C,IACtBhY,KAAKypC,WACLzpC,KAAKktD;OAITptD,sBAAAA,SAAQkF;QACN,MAAMA,aAAiBiS,IACrB,MAAMwtB,GAAkB,WAAW,SAAS,GAAGz/B;QAEjD,OACEhF,KAAKypC,cAAczkC,EAAMykC,aAAazpC,KAAKsvD,GAAO9pD,QAAQR,EAAMsqD;OAIpExvD,4BAAAA,SACE8oD;QAEA,OAAO,IAAI3xC,EAASjX,KAAKsvD,IAAQtvD,KAAKypC,WAAWmf;;uEAI3C9oD,iBAAAA,SACNsmC,GACAiqB,GACAhmD,GACA+O;QAGA,IADAkrB,GAAgB8B,GAAY,GAAGiqB,IAC3BA,aAAsBpD,IAAkB;YAC1C,IAAI5iD,EAAO1E,SAAS,GAClB,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,oCAAkC4lC;YAGtC,IAAMmY,IAAO8R;YACb,KAAK9R,EAAK5sC,QACR,MAAM,IAAIvO,EACRhD,EAAKM,WACL,yDACK0lC;YAGT,OAAOpmC,KAAKywD,GAAkBlS,EAAemS,IAAEt3C;;QAE/C,IAAMu3C,IAAY,EAACN,IAAYv4C,OAAOzN;QACtC,OAAOrK,KAAK4wD,GAAgBxqB,GAAYuqB,GAAWv3C;;;;;;;;;;;;;IAe/CtZ,iBAAAA,SAAkB4T,GAAe0F;;;;;;;;QAUvC,KATA,IAAMy3C,IAA0B,WASV7wD,IAAAA,KAAKsvD,GAAO/4C,SAAZvW,cAAAA;YAAjB,IAAMuW;YACT,IAAIA,EAAQjF,MAAMiG,KAChBs5C,EAAWxqD,KAAK8I,EAASnP,KAAKypC,UAAUC,IAAah2B,EAAI3K,YACpD;gBACL,IAAMnH,IAAQ8R,EAAIpC,MAAMiF,EAAQjF;gBAChC,IAAInH,EAAkBvI,IACpB,MAAM,IAAIwB,EACRhD,EAAKI,kBACL,iGAEE+V,EAAQjF,QACR;gBAGC,IAAc,SAAV1P,GAEJ;oBACL,IAAM0P,IAAQiF,EAAQjF,MAAMjK;oBAC5B,MAAM,IAAIjE,EACRhD,EAAKI,kBACL,iGACmC8Q;;gBANrCu/C,EAAWxqD,KAAKzE;;;QAYtB,OAAO,IAAIsX,GAAM23C,GAAYz3C;;;;;IAMvBtZ,iBAAAA,SACNsmC,GACAh6B,GACAgN;;QAGA,IAAM7C,IAAUvW,KAAKsvD,GAAOx4C;QAC5B,IAAI1K,EAAOzG,SAAS4Q,EAAQ5Q,QAC1B,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,oCAAkC4lC;QAOtC,KADA,IAAMyqB,IAA0B,IACvBpqD,IAAI,GAAGA,IAAI2F,EAAOzG,QAAQc,KAAK;YACtC,IAAMqqD,IAAW1kD,EAAO3F;YAExB,IADyB8P,EAAQ9P,GACZ6K,MAAMiG,KAAc;gBACvC,IAAwB,mBAAbu5C,GACT,MAAM,IAAI1tD,EACRhD,EAAKI,kBACL,yDACK4lC,8BAAkC0qB;gBAG3C,KACG9wD,KAAKsvD,GAAOv7B,SACc,MAA3B+8B,EAASvpD,QAAQ,MAEjB,MAAM,IAAInE,EACRhD,EAAKI,kBACL,2GACyB4lC,8CACnB0qB;gBAGV,IAAMxpD,IAAOtH,KAAKsvD,GAAOhoD,KAAKqO,MAAMzO,EAAaqB,EAAWuoD;gBAC5D,KAAKxoD,EAAYuO,GAAcvP,IAC7B,MAAM,IAAIlE,EACRhD,EAAKI,kBACL,iHACiD4lC,uDAClB9+B;gBAInC,IAAMyB,IAAM,IAAIT,EAAYhB;gB
AC5BupD,EAAWxqD,KAAK8I,EAASnP,KAAKypC,UAAUC,IAAa3gC;mBAChD;gBACL,IAAMgoD,IAAU/wD,KAAKypC,UAAU+jB,GAAYqC,GACzCzpB,GACA0qB;gBAEFD,EAAWxqD,KAAK0qD;;;QAIpB,OAAO,IAAI73C,GAAM23C,GAAYz3C;OAsB/BtZ,yBAAAA;;QACE+iC,GAA4B,oBAAoBoC,WAAW,GAAG;QAC9D,IACImZ,GADAloC,IAA2C,IAE3C+3C,IAAU;QAyCd,OAvC2B,mBAAlBv/B,EAAKu/B,MACX3F,GAAkB55B,EAAKu/B,QAGxB1pB,GAAoB,oBADpBruB,IAAUwY,EAAKu/B,IACkC,EAC/C;QAEF3qB,GACE,oBACA,WACA,0BACAptB,EAAQsoC;QAEVyP,MAGE3F,GAAkB55B,EAAKu/B,MACzB7P,IAAW1vB,EAAKu/B,MAEhBlrB,GAAgB,oBAAoB,YAAYkrB,GAASv/B,EAAKu/B,KAC9D9qB,GACE,oBACA,YACA8qB,IAAU,GACVv/B,EAAKu/B,IAAU;QAEjB9qB,GACE,oBACA,YACA8qB,IAAU,GACVv/B,EAAKu/B,IAAU,KAEjB7P,IAAW;YACT/1C,MAAMqmB,EAAKu/B;YACXn/B,OAAOJ,EAAKu/B,IAAU;YACtBE,UAAUz/B,EAAKu/B,IAAU;YAG7BjuD,KAAKgxD,GAAyChxD,KAAKsvD,KAC5CtvD,KAAKouD,GAAmBl4C,GAASkoC;OAGlCt+C,iBAAAA,SACNoW,GACAkoC;QAFMt+C,cAIFuuD,IAAc/7B,SAAAA;YAChB60B,QAAQr4B,MAAM,iCAAiCwD;;QAE7C8rB,EAAStvB,UACXu/B,IAAajQ,EAAStvB,MAAMiJ,KAAKqmB;QAGnC,IAAM4N,IAAgB,IAAIC,GAA4B;YACpD5jD,MAAOkG,SAAAA;gBACD6vC,EAAS/1C,QACX+1C,EAAS/1C,KACP,IAAI4oD,GACFjxD,EAAKypC,WACLzpC,EAAKsvD,IACL/gD,GACAvO,EAAKktD;;YAKbp+B,OAAOu/B;YAGH6C,IAAkBlxD,KAAKypC,UAAUqgB,MACjCwE,IAAmB4C,EAAgBnY,OACvC/4C,KAAKsvD,IACLtD,GACA91C;QAEF,OAAO;YACL81C,EAAcE,MACdgF,EAAgBtX,GAAS0U;;OAIrBxuD,iBAAAA,SAAyCwgB;QAC/C,IAAIA,EAAM+0B,QAAqD,MAAjC/0B,EAAMxJ,GAAgBnR,QAClD,MAAM,IAAIvC,EACRhD,EAAKc,eACL;OAKNpB,kBAAAA,SAAIoW;QAAJpW;QAIE,OAHA+iC,GAA4B,aAAaoC,WAAW,GAAG,IACvDspB,GAAmB,aAAar4C,IAChClW,KAAKgxD,GAAyChxD,KAAKsvD,KAC5C,IAAIttD,SACT,SAACC,GAA+CgwB;YAC1C/b,KAA8B,YAAnBA,EAAQskC,SACrBx6C,EAAKypC,UACFqgB,KACAqH,GAA2BnxD,EAAKsvD,IAChCvsD,MAAM66C,SAAAA;gBACL37C,EACE,IAAIgvD,GACFjxD,EAAKypC,WACLzpC,EAAKsvD,IACL1R,GACA59C,EAAKktD;gBAGRj7B,KAELjyB,EAAKyuD,GAAuBxsD,GAASgwB,GAAQ/b;;OAM7CpW,iBAAAA,SACNmC,GACAgwB,GACA/b;QAEA,IAAM0jC,IAAW55C,KAAKouD,GACpB;YACE5P;YACAkQ;WAEF;YACErmD,MAAOkG,SAAAA;;;gBAGLqrC,KAGErrC,EAAOogD,SAAShuC,aAChBzK,KACmB,aAAnBA,EAAQskC,SAERvoB,EACE,IAAI7uB,EACFhD,EAAKgB,aACL,mLAOJa,EAAQsM;;YAGZugB,OAAOmD;;;;;;;;IAULnyB,iBAAAA,SAAqBsxD;QAC3B,IAA+B,mBAApBA,GAA8B;YACvC,IAAwB,OAApBA,GACF,MAAM,IAAIhuD,EACRhD,EAAKI,kBACL;YAIJ,KACGR,KAAKsvD,GAAOv7B,SACqB,MAAlCq9B,EAAgB7pD,QAAQ,MAExB,MAAM,IAAInE,EACRhD,EAAKI,kBACL,qHAEM4wD;YAGV,IAAM9pD,IAAOtH,KAAKsvD,GAAOhoD,KAAKqO,MAC5BzO,EAAaqB,EAAW6oD;YAE1B,KAAK9oD,EAAYuO,GAAcvP,IAC7B,MAAM,IAAIlE,EACRhD,EAAKI,kBACL,8IAEU8G,4DAA0DA,EAAK3B;YAG7E,OAAOwJ,EAASnP,KAAKypC,UAAUC,IAAa,IAAIphC,EAAYhB;;QACvD,IAAI8pD,aAA2B9nB,IAAmB;YACvD,IAAM1H,IAAMwvB;YACZ,OAAOjiD,EAASnP,KAAKypC,UAAUC,IAAa9H,EAAI+H;;QAEhD,MAAM,IAAIvmC,EACRhD,EAAKI,kBACL,mIAEKojC,GAAiBwtB;;;;;;IASpBtxD,iBAAAA,SACN8B,GACAyvD;QAEA,KAAKjtB,MAAM50B,QAAQ5N,MAA2B,MAAjBA,EAAM+D,QACjC,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,uDACM6wD,EAAS5vD;QAGnB,IAAIG,EAAM+D,SAAS,IACjB,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,qBAAmB6wD,EAAS5vD;QAIhC,IAAIG,EAAM2F,QAAQ,SAAS,GACzB,MAAM,IAAInE,EACRhD,EAAKI,kBACL,qBAAmB6wD,EAAS5vD;QAIhC,IAAIG,EAAM6F,QAAO4I,SAAAA;YAAWvB,OAAAA,OAAO7C,MAAMoE;YAAU1K,SAAS,GAC1D,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,qBAAmB6wD,EAAS5vD;OAM1B3B,iBAAAA,SAAkB2H;QACxB,IAAIA,aAAkBiR,IAAa;YACjC,IAAM44C,IAAW,2FACXC,IAAiB,mEACjBC,IAAYF,EAAS/pD,QAAQE,EAAOoR,OAAO,GAC3C44C,IAAkBF,EAAehqD,QAAQE,EAAOoR,OAAO;YAE7D,IAAIpR,EAAOkR,MAAgB;gBACzB,IAAM+4C,IAAgB1xD,KAAKsvD,GAAOl4C;gBAClC,IAAsB,SAAlBs6C,MAA2BA,EAAclsD,QAAQiC,EAAO6J,QAC1D,MAAM,IAAIlO,EACRhD,EAAKI,kBACL,0IAE6BkxD,EAAcjwD,yBAChCgG,EAAO6J,MAAM7P;gBAI5B,IAAM4V,IAAoBrX,KAAKsvD,GAAOh4C;gBACZ,SAAtBD,KACFrX,KAAK2xD,GACHlqD,EAAO6J,OACP+F;mBAGC,IAAIo6C,KAAmBD,GAAW;;;gBAGvC,IAAII,IAAiC;gBAOrC,IANIH,MACFG,IAAgB5xD,KAAKsvD,GAAOuC,GAAmBN,KAE3B,SAAlBK,KAA0BJ,MAC5B
I,IAAgB5xD,KAAKsvD,GAAOuC,GAAmBP,KAE5B,QAAjBM;;gBAEF,MAAIA,MAAkBnqD,EAAOoR,KACrB,IAAIzV,EACRhD,EAAKI,kBACL,kDACMiH,EAAOoR,GAAGpX,4BAGZ,IAAI2B,EACRhD,EAAKI,kBACL,oCAAkCiH,EAAOoR,GAAGpX,kCACjCmwD,EAAcnwD;;;OAQ7B3B,iBAAAA,SAAmByW;QACzB,IAA2C,SAAvCvW,KAAKsvD,GAAOh4C,MAAiC;;YAE/C,IAAMH,IAAkBnX,KAAKsvD,GAAOl4C;YACZ,SAApBD,KACFnX,KAAK2xD,GAAkCx6C,GAAiBZ,EAAQjF;;OAK9DxR,iBAAAA,SACNgyD,GACAv7C;QAEA,KAAKA,EAAQ/Q,QAAQssD,IACnB,MAAM,IAAI1uD,EACRhD,EAAKI,kBACL,2FACiCsxD,EAAWrwD,8CACbqwD,EAAWrwD,+FAExB8U,EAAQ9U;;;IAahC3B,WACmB8sD,GACAmF,GACAC,GACA9E;kBAHAN,aACAmF,aACAC,aACA9E,GATnBltD,UAAoE,MACpEA,UAA+D;QAU7DA,KAAK2uD,WAAW,IAAIU,GAClB2C,EAAU37C,kBACV27C,EAAUrxC;;WAIdJ;aAAAA;YACE,IAAMhS,IAAoD;YAE1D,OADAvO,KAAKmG,SAAQuN,SAAAA;gBAAOnF,OAAAA,EAAOlI,KAAKqN;iBACzBnF;;;;QAGT0E;aAAAA;YACE,OAAOjT,KAAKgyD,GAAUzxC,KAAKnX;;;;QAG7B7C;aAAAA;YACE,OAAOvG,KAAKgyD,GAAUzxC,KAAKha;;;;QAG7BzG,sBAAAA,SACEyxB,GACA0gC;QAFFnyD;QAIE+iC,GAA4B,yBAAyBoC,WAAW,GAAG,IACnElC,GAAgB,yBAAyB,YAAY,GAAGxR;QACxDvxB,KAAKgyD,GAAUzxC,KAAKpa,SAAQuN,SAAAA;YAC1B6d,EAASpoB,KAAK8oD,GAASjyD,EAAKkyD,GAAsBx+C;;OAItD4M;aAAAA;YACE,OAAO,IAAIrJ,GAAMjX,KAAK+xD,IAAgB/xD,KAAK4sD,IAAY5sD,KAAKktD;;;;QAG9DptD,yBAAAA,SACEoW;QAEIA,MACFquB,GAAoB,4BAA4BruB,GAAS,EACvD,6BAEFotB,GACE,4BACA,WACA,0BACAptB,EAAQsoC;QAIZ,IAAMA,OACJtoC,MAAWA,EAAQsoC;QAGrB,IAAIA,KAA0Bx+C,KAAKgyD,GAAUnxC,IAC3C,MAAM,IAAIzd,EACRhD,EAAKI,kBACL;QAkBJ,OAZGR,KAAKmyD,MACNnyD,KAAKoyD,OAAyC5T,MAE9Cx+C,KAAKmyD;;;;;;iBAmNT1oB,GACA+U,GACAvS,GACA2c;YAEA,IAAI3c,EAASzrB,GAAQpX,KAAW;;;gBAG9B,IACI5C,IAAQ;gBACZ,OAAOylC,EAASxrB,WAAW5Y,KAAIoY,SAAAA;oBAC7B,IAAMvM,IAAM,IAAIs7C,GACdvlB,GACAxpB,EAAOvM,IAAI3K,KACXkX,EAAOvM,KACPu4B,EAAStrB,WACTsrB,EAASvrB,GAAYpC,IAAI2B,EAAOvM,IAAI3K,MACpC6/C;oBAWF,OADU3oC,EAAOvM,KACV;wBACL0M,MAAM;wBACN1M,KAAAA;wBACA2+C,WAAW;wBACXC,UAAU9rD;;;;;;YAMd,IAAI+rD,IAAetmB,EAASzrB;YAC5B,OAAOyrB,EAASxrB,WACbhZ,QACCwY,SAAAA;gBAAUu+B,OAAAA,0BAA0Bv+B,EAAOG;gBAE5CvY,KAAIoY,SAAAA;gBACH,IAAMvM,IAAM,IAAIs7C,GACdvlB,GACAxpB,EAAOvM,IAAI3K,KACXkX,EAAOvM,KACPu4B,EAAStrB,WACTsrB,EAASvrB,GAAYpC,IAAI2B,EAAOvM,IAAI3K,MACpC6/C,IAEEyJ,KAAY,GACZC,KAAY;gBAUhB,yBATIryC,EAAOG,SACTiyC,IAAWE,EAAahrD,QAAQ0Y,EAAOvM,IAAI3K,MAE3CwpD,IAAeA,EAAah/C,OAAO0M,EAAOvM,IAAI3K;oCAE5CkX,EAAOG,SAETkyC,KADAC,IAAeA,EAAah0C,IAAI0B,EAAOvM,MACfnM,QAAQ0Y,EAAOvM,IAAI3K,OAEtC;oBAAEqX,MAAMoyC,GAAiBvyC,EAAOG;oBAAO1M,KAAAA;oBAAK2+C,UAAAA;oBAAUC,UAAAA;;;UAjR7DtyD,KAAK4sD,IACLpO,GACAx+C,KAAKgyD,IACLhyD,KAAKktD,KAEPltD,KAAKoyD,KAAuC5T,IAGvCx+C,KAAKmyD;;+DAIdryD,sBAAAA,SAAQkF;QACN,MAAMA,aAAiBisD,IACrB,MAAMxsB,GAAkB,WAAW,iBAAiB,GAAGz/B;QAGzD,OACEhF,KAAK4sD,OAAe5nD,EAAM4nD,MAC1B5sD,KAAK+xD,GAAevsD,QAAQR,EAAM+sD,OAClC/xD,KAAKgyD,GAAUxsD,QAAQR,EAAMgtD,OAC7BhyD,KAAKktD,OAAeloD,EAAMkoD;OAItBptD,iBAAAA,SAAsB4T;QAC5B,OAAO,IAAIs7C,GACThvD,KAAK4sD,IACLl5C,EAAI3K,KACJ2K,GACA1T,KAAK2uD,SAAShuC,WACd3gB,KAAKgyD,GAAUtxC,GAAYpC,IAAI5K,EAAI3K,MACnC/I,KAAKktD;;;IAOTptD,WACW2yD,GACThpB,GACAyjB;QAHFptD;QAME,KADA0B,IAAAA,aAAMgrD,GAAc3hC,GAAO4nC,IAAQhpB,GAAWyjB,iBAJrCuF,GAKLA,EAAM9sD,SAAS,KAAM,GACvB,MAAM,IAAIvC,EACRhD,EAAKI,kBACL,kGAEKiyD,EAAMprD,gBAAyBorD,EAAM9sD;;;WAbmBsR,SAkBnEiZ;aAAAA;YACE,OAAOlwB,KAAKsvD,GAAOhoD,KAAKoN;;;;QAG1BsW;aAAAA;YACE,IAAMoM,IAAap3B,KAAKsvD,GAAOhoD,KAAK6jB;YACpC,OAAIiM,EAAWhuB,MACN,OAEA,IAAIkgC,GACT,IAAIhhC,EAAY8uB,IAChBp3B,KAAKypC;;;;QAKXniC;aAAAA;YACE,OAAOtH,KAAKsvD,GAAOhoD,KAAKD;;;;QAG1BvH,kBAAAA,SAAIusD;QAaF,IAZAxpB,GAA4B,2BAA2BoC,WAAW,GAAG;;;QAG5C,MAArBA,UAAUt/B,WACZ0mD,IAAa1G,GAAOC,OAEtB7iB,GACE,2BACA,oBACA,GACAspB;QAEiB,OAAfA,GACF,MAAM,IAAIjpD,EACRhD,EAAKI,kBACL;QAGJ,IAAM8G,IAAOJ,EAAaqB;QAC1B,OAAO+gC,GAAkBijB,GACvBvsD,KAAKsvD,GAAOhoD,KAA
KqO,MAAMrO,IACvBtH,KAAKypC,WACLzpC,KAAKktD;OAITptD,kBAAAA,SAAI8B;QACF6gC,GAA0B,2BAA2BwC,WAAW,IAIhElC,GAAgB,2BAA2B,UAAU,GAH9B/iC,KAAKktD,KACxBltD,KAAKktD,GAAWwF,YAAY9wD,KAC5BA;QAEJ,IAAM+wD,IAAS3yD,KAAK0T;QACpB,OAAOi/C,EAAOr/C,IAAI1R,GAAOmB,MAAK;YAAM4vD,OAAAA;;OAGtC7yD,4BAAAA,SACE8oD;QAEA,OAAO,IAAI0D,EAAuBtsD,KAAKyyD,IAAOzyD,KAAKypC,WAAWmf;;EA9EG3xC;;;;;;;;;;;;;;;;;;uBAkFrE;SAASk2C,GACP/mB,GACAlwB;IAEA,eAAIA,GACF,OAAO;QACLo3C;;IAeJ,IAXA/oB,GAAoB6B,GAAYlwB,GAAS,EAAC,SAAS,kBACnDotB,GAA0B8C,GAAY,WAAW,SAASlwB,EAAQo3C,iB3B3lElE/qB,GACAc,GACAuvB,GACA5vB,GACA6vB;mBAEI7vB,cAjCJT,GACAc,GACAuvB,GACA5vB,GACA6vB;YAEA,MAAM7vB,aAAoBoB,QACxB,MAAM,IAAIhhC,EACRhD,EAAKI,kBACL,cAAY+hC,yBAA+Bc,6CACHO,GAAiBZ;YAI7D,KAAK,IAAIv8B,IAAI,GAAGA,IAAIu8B,EAASr9B,UAAUc,GACrC,KAAKosD,EAAU7vB,EAASv8B,KACtB,MAAM,IAAIrD,EACRhD,EAAKI,kBACL,cAAY+hC,yBAA+Bc,yBACvBuvB,kCAA2CnsD,eACrDm9B,GAAiBZ,EAASv8B;UAetC87B,GACAc,GACAuvB,GACA5vB,GACA6vB;K2BglEJC,CACE1sB,GACA,eACA,2BACAlwB,EAAQq3C,cACRl9C,SAAAA;QACqB,OAAA,mBAAZA,KAAwBA,aAAmB23B;oBAGlD9xB,EAAQq3C,0BAA6Br3C,EAAQo3C,OAC/C,MAAM,IAAIlqD,EACRhD,EAAKI,kBACL,wCAAsC4lC;IAK1C,OAAOlwB;;;AAGT,SAAS64C,GACP3oB,GACAlwB;IAEA,kBAAIA,IACK,MAGTquB,GAAoB6B,GAAYlwB,GAAS,EAAC,uBAC1CqtB,GACE6C,GACA,GACA,oBACAlwB,EAAQk5C,kBACR,EAAC,YAAY,YAAY;IAEpBl5C;;;AAGT,SAASq4C,GACPnoB,GACAlwB;IAEAitB,GAAwBiD,GAAY,UAAU,GAAGlwB,IAC7CA,MACFquB,GAAoB6B,GAAYlwB,GAAS,EAAC,aAC1CqtB,GACE6C,GACA,GACA,UACAlwB,EAAQskC,QACR,EAAC,WAAW,UAAU;;;AAK5B,SAASuS,GACP3mB,GACA0mB,GACArjB;IAEA,IAAMqjB,aAAuBxjB,IAEtB;QAAA,IAAIwjB,EAAYrjB,cAAcA,GACnC,MAAM,IAAIrmC,EACRhD,EAAKI,kBACL;QAGF,OAAOssD;;IAPP,MAAMroB,GAAkB2B,GAAY,qBAAqB,GAAG0mB;;;AAqFhE,SAAS0F,GAAiBpyC;IACxB,QAAQA;MACN;QACE,OAAO;;MACT;MACA;QACE,OAAO;;MACT;QACE,OAAO;;MACT;QACE,OAp8EsBxa;;;;;;;;;;;;aAi9EnBmtD,GACPnK,GACAhnD,GACA2gC;IAEA,IAAI6qB;IAOJ,OANIxE,KACFwE,IAAiBxE,EAAU8J,YAAY9wD,IACvC2gC,IAAe,sBAAsBA,KAErC6qB,IAAiBxrD,GAEZ,EAACwrD,GAAgB7qB;;;SCv9EVywB,GACdC,GACAC;IAEA,SAASC;QACP,IAAIrkC,IAAQ;QAKZ,MAJIokC,MACFpkC,KAAS,KACTA,KAASokC,IAEL,IAAI9vD,EAAehD,EAAKI,kBAAkBsuB;;;;;QAWlD,OANAqkC,EAAkBlqD,YAAYgqD,EAAIhqD;;IAGlCD,OAAOoqD,OAAOD,GAAmBF,IAG1BE;;;;;;;;;;;;;;;;;;;;;ICTIE,KAAkBL,GAC7B9I,IACA,sCAEWoJ,KAAoBN,GAC/Bnf,IACA,uDAEW0f,KAAmBP,GAC9BvG,IACA,8CAEW+G,KAA0BR,GACrC1pB,IACA,4CAEWmqB,KAAyBT,GAAuB/F,KAChDyG,KAA8BV,GACzChE,KAEW2E,KAAcX,GAAuB/7C,KACrC28C,KAAsBZ,GAAuB/B,KAC7C4C,KAA4Bb,GACvC1G,IACA,mDAEWwH,KAAmBd;;sB1ByH9BlzD;QAEE,OADAwiC,GAAe,qBAAqB2C,YAC7B,IAAIa;2BAGbhmC;QAEE,OADAwiC,GAAe,8BAA8B2C,YACtC,IAAIc;sBAGbjmC;;;;gBAIE,OAHA6iC,GAA4B,yBAAyBsC,WAAW,IAGzD,IAAI8uB,GAAyB/jD;uBAGtClQ;;;;gBAIE,OAHA6iC,GAA4B,0BAA0BsC,WAAW,IAG1D,IAAI+uB,GAA0BhkD;qBAGvClQ,SAAiB+c;QAGf,OAFAkmB,GAAgB,wBAAwB,UAAU,GAAGlmB,IACrD4lB,GAA0B,wBAAwBwC,WAAW;QACtD,IAAIgvB,GAA+Bp3C;OAG5C/c,sBAAAA,SAAQkF;QACN,OAAOhF,SAASgF;;K0BtJlB,sCAEWkvD,KAAalB,GACxB9tB,IACA,kEAGIivB,KAAqB;IACzBjK,WAAWmJ;IACXhqB,UAAAA;IACA/kC,WAAAA;IACA4gC,MAAMgvB;IACNrgB,aAAayf;IACb7G,YAAY8G;IACZjqB,mBAAmBkqB;IACnBvG,kBAAkBwG;IAClBx8C,OAAO08C;IACP3E,uBAAuB0E;IACvBzC,eAAe2C;IACftH,qBAAqBuH;eACrBlsD;IACAysD,YAAYN;IACZzlC,aAAa67B,GAAU77B;IACvBm7B,sBAAAA;;;WCzEA1pD,iBAAAA,SAAYyxB;;OAIZzxB,iBAAAA;;;;ICeAA;QAAAA;QANAE,UAA4C;YAC1CA,OAAAA,EAAKq0D;WACPr0D,UAA8C;YAC5CA,OAAAA,EAAKs0D;WACPt0D,UAAmD,IAGjDA,KAAKu0D;;WAGPz0D,iBAAAA,SAAYyxB;QACVvxB,KAAKu7C,GAAUl1C,KAAKkrB;OAGtBzxB,iBAAAA;QACE+4B,OAAOQ,oBAAoB,UAAUr5B,KAAKw0D,KAC1C37B,OAAOQ,oBAAoB,WAAWr5B,KAAKy0D;OAGrC30D,iBAAAA;QACN+4B,OAAOC,iBAAiB,UAAU94B,KAAKw0D,KACvC37B,OAAOC,iBAAiB,WAAW94B,KAAKy0D;OAGlC30D,iBAAAA;QACN4lB,GA/BY,uBA+BM;QAClB,KAAuB1lB,WAAAA,IAAAA,KAAKu7C,IAALv7C,cAAAA;aACrBuxB;;OAIIzxB,iBAAAA;
QACN4lB,GAtCY,uBAsCM;QAClB,KAAuB1lB,WAAAA,IAAAA,KAAKu7C,IAALv7C,cAAAA;aACrBuxB;;;;;;WAOJzxB;QACE,OACoB,sBAAX+4B,qBACPA,OAAOC,+BACPD,OAAOQ;;;IC3CXv5B,WAAY4uB;QACV1uB,KAAK00D,KAAShmC,EAAKgmC,IACnB10D,KAAK20D,KAAUjmC,EAAKimC;;WAGtB70D,iBAAAA,SAAOyxB;QAELvxB,KAAK40D,KAAgBrjC;OAGvBzxB,iBAAAA,SAAQyxB;QAENvxB,KAAK60D,KAAiBtjC;OAGxBzxB,wBAAAA,SAAUyxB;QAERvxB,KAAK80D,KAAmBvjC;OAG1BzxB,oBAAAA;QACEE,KAAK20D;OAGP70D,mBAAAA,SAAKyuB;QACHvuB,KAAK00D,GAAOnmC;OAGdzuB,iBAAAA;QAKEE,KAAK40D;OAGP90D,iBAAAA,SAAYwyB;QAKVtyB,KAAK60D,GAAeviC;OAGtBxyB,iBAAAA,SAAcyuB;QAKZvuB,KAAK80D,GAAiBvmC;;KCvBpBwmC,KAAmD;IACzDC,mBAA6C;IAC7CC,QAAkC;GAK5BC,KAA0B,iBAAiBt1D;IAS/CE,WAAYq1D;QACVn1D,KAAKoP,KAAa+lD,EAAK/lD;QACvB,IAAMqF,IAAQ0gD,EAAKtlC,MAAM,UAAU;QACnC7vB,KAAKo1D,KAAU3gD,IAAQ,QAAQ0gD,EAAKvlC,MACpC5vB,KAAK8vB,mBAAmBqlC,EAAKrlC;;;;;kBAOvBhwB,iBAAAA,SACNiE,GACAqnC;QAEA,IAAIA,GACF,KAAK,IAAMiqB,KAAUjqB,EAAMtpC,GACrBspC,EAAMtpC,EAAYoH,eAAemsD,OACnCtxD,EAAQsxD,KAAUjqB,EAAMtpC,EAAYuzD;QAI1CtxD,EAAQ,uBAAuBmxD;OAGjCp1D,iBAAAA,SACEutC,GACAjB,GACAhB;QAHFtrC,cAKQw1D,IAAMt1D,KAAKu1D,GAAQloB;QAEzB,OAAO,IAAIrrC,SAAQ,SAACC,GAAyBgwB;YAC3C,IAAMujC,IAAM,IAAIC;YAChBD,EAAIE,WAAWC,EAAUC,WAAU;gBACjC;oBACE,QAAQJ,EAAIK;sBACV,KAAKC,EAAUC;wBACb,IAAMC,IAAOR,EAAIS;wBACjBvwC,GAhEE,cAgEgB,iBAAiBwe,KAAKC,UAAU6xB,KAClD/zD,EAAQ+zD;wBACR;;sBACF,KAAKF,EAAUI;wBACbxwC,GApEE,cAoEgB,UAAU2nB,IAAU,gBACtCpb,EACE,IAAI7uB,EAAehD,EAAKK,mBAAmB;wBAE7C;;sBACF,KAAKq1D,EAAUK;wBACb,IAAM3vC,IAASgvC,EAAIY;wBAQnB,IAPA1wC,GA3EE,cA6EA,UAAU2nB,IAAU,yBACpB7mB,GACA,kBACAgvC,EAAIa;wBAEF7vC,IAAS,GAAG;4BACd,IAAM8vC,IAAiBd,EAAIS,kBACxBnnC;4BACH,IACIwnC,KACAA,EAAc9vC,UACd8vC,EAAc/0D,SAChB;gCACA,IAAMg1D,apEwK2B/vC;oCACjD,IAAMgwC,IAAchwC,EAAOiwC,cAAc1uD,QAAQ,KAAK;oCACtD,OAAOiB,OAAOoD,OAAOhM,GAAMmH,QAAQivD,MAAwB,IACtDA,IACDp2D,EAAKG;kCoE3KS+1D,EAAc9vC;gCAEhByL,EACE,IAAI7uB,EACFmzD,GACAD,EAAc/0D;mCAIlB0wB,EACE,IAAI7uB,EACFhD,EAAKG,SACL,kCAAkCi1D,EAAIY;;;;wBAO5C1wC,GA9GA,cA8GkB,UAAU2nB,IAAU,aACtCpb,EACE,IAAI7uB,EAAehD,EAAKgB,aAAa;wBAGzC;;sBACF;wBACEwE;;;oBAYJ8f,GAjIM,cAiIY,UAAU2nB,IAAU;;;;;;YAO1C,IAAMqpB,IAAW1tD,kBAAKojC;mBACfsqB,EAAQpnD;YAEf,IAAMqnD,IAAgBzyB,KAAKC,UAAUuyB;YACrChxC,GA5IU,cA4IQ,iBAAiB4vC,IAAM,MAAMqB;;;;;;YAM/C,IAAM5yD,IAAqB;gBAAE6yD,gBAAgB;;YAE7C52D,EAAK62D,GAAwB9yD,GAASqnC,IAEtCoqB,EAAI7qB,KAAK2qB,GAAK,QAAQqB,GAAe5yD,GApIlB;;OAwIvBjE,iBAAAA,SACEutC,GACAjB,GACAhB;;;QAIA,OAAOprC,KAAKstC,GAAuBD,GAASjB,GAAShB;OAGvDtrC,iBAAAA,SACEutC,GACAjC;QAEA,IAAM0rB,IAAW,EACf92D,KAAKo1D,IACL,KAxKqB,iCA0KrB,KACA/nB,GACA,cAEI0pB,IAAsBC,KACtB5qB,IAA6B;;;YAGjC6qB,oBAAoB;YACpBC,oBAAoB;YACpBC,kBAAkB;;;gBAGhB7nD,UAAU,cAAYtP,KAAKoP,GAAWC,4BAAuBrP,KAAKoP,GAAWE;;YAE/E8nD;YACAC;YACAC,uBAAuB;;;;;;;gBAOrBC,gCAAgC;;YAElCznC,kBAAkB9vB,KAAK8vB;;QAGzB9vB,KAAK62D,GAAwBzqB,EAA2B8qB,oBAAE9rB;;;;;;;;;;;;;;;;QAoBvDosB,OACAC,OACAC,OACAC,OACAC,OACAC,QAEDzrB,EAAQ0rB,4BAA4B;QAGtC,IAAMxC,IAAMwB,EAAS1vD,KAAK;QAC1Bse,GAxOY,cAwOM,0BAA0B4vC,IAAM,MAAMlpB;QACxD,IAAM2rB,IAAUhB,EAAoBiB,iBAAiB1C,GAAKlpB,IAOtD6rB,QAKAC,QAEEC,IAAe,IAAIC,GAAwB;YAC/CC,IAAS9pC,SAAAA;gBACF2pC,IASHxyC,GAlQM,cAkQY,6CAA6C6I,MAR1D0pC,MACHvyC,GA3PI,cA2Pc;gBAClBqyC,EAAQO,QACRL,SAEFvyC,GA/PM,cA+PY,uBAAuB6I,IACzCwpC,EAAQptB,KAAKpc;;YAKjBgqC,IAAS;gBAAMR,OAAAA,EAAQ1tB;;YAOnBmuB,IAAuB,SAC3Bp4C,GACAzZ;;;YAIAoxD,EAAQhf,OAAO34B,IAAOq4C,SAAAA;gBACpB;oBACE9xD,EAAG8xD;kBACH,OAAOzpC;oBACPqJ,YAAW;wBACT,MAAMrJ;wBACL;;;;;;;;;;;;;QAuFT,OAlFAwpC,EAAqBE,EAAW/C,UAAUgD,OAAM;YACzCT,KACHxyC,GA/RQ,cA+RU;aAItB8yC,EAAqBE,EAAW/C,UAAUiD,QAAO;YAC1CV,MACHA,QACAxyC,GAtSQ,cAsSU,gCAClByyC,EAAaU;aAIjBL,EAA4BE,EAAW/C,UAAU9mC,QAAOyD,SAAAA;YACjD4lC,MACHA,QACAxyC,GA9SQ,cA8SU,iCAAiC4M,IACnD6lC,
EAAaU,GACX,IAAIz1D,EACFhD,EAAKgB,aACL;aAaRo3D,EACEE,EAAW/C,UAAUmD,UACrBvqC,SAAAA;;YACE,KAAK2pC,GAAQ;gBACX,IAAMa,IAAUxqC,EAAK7b,KAAK;gBAjUcrP,KAkU3B01D;;;;;;gBAMb,IAAMC,IAA2CD,GAC3CjqC,IACJkqC,EAAelqC,wBACdkqC,EAAqC,iCAAIlqC;gBAC5C,IAAIA,GAAO;oBACTpJ,GA/UI,cA+Uc,8BAA8BoJ;;oBAEhD,IAAMtI,IAAiBsI,EAAMtI,QACzBllB,apEvRqBklB;;;wBAGnC,IAAMllB,IAAgB0U,GAAQwQ;wBAC9B,eAAIllB,GAIJ,OAAO2Z,GAAmB3Z;sBoE+QgBklB,IAC5BjlB,IAAUutB,EAAMvtB;+BAChBD,MACFA,IAAOlB,EAAKe,UACZI,IACE,2BACAilB,IACA,mBACAsI,EAAMvtB;;oBAGV22D,QACAC,EAAaU,GAAY,IAAIz1D,EAAe9B,GAAMC,KAClDw2D,EAAQ1tB;uBAER3kB,GAjWI,cAiWc,wBAAwBqzC,IAC1CZ,EAAac,GAAcF;;aAMnC1gC,YAAW;;;;;YAKT8/B,EAAae;YACZ,IACIf;;;IAITr4D,iBAAAA,SAAQutC;QACN,IAAM8rB,IAAapE,GAAsB1nB;QAKzC,OACErtC,KAAKo1D,KACL,kBAGAp1D,KAAKoP,GAAWC,YAChB,gBACArP,KAAKoP,GAAWE,WAChB,gBACA6pD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;SCtZUC,GAAkB5uC;;;;;;;;cLwEhC3qB,GACAw5D;QAKCx5D,EAAgCsB,SAASm4D,kBACxC,IAAIC,EACF,cACAC,SAAAA;YAEE,OKhFJ,SAACxP,GAAKvnD;gBAAS,OAAA,IAAIynD,GAAUF,GAAKvnD,GAAM,IAAI4kD;aLgFjCgS,CADKG,EAAUC,YAAY,OAAO/2D,gBACZ82D,EAAUC,YAAY;mCAGrDC,kCAAqBvF;MKpFvB3pC,IAGFA,EAASmvC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACVXnwD,GAAgBowD,GAAY;ICM1B95D;QACEE,KAAK8kC,KAAkC,sBAATp7B;;WAGhCsb;aAAAA;;;YAGE,OAA2B,sBAAbA,WAA2BA,WAAW;;;;QAGtD6T;aAAAA;;;YAGE,OAAyB,sBAAXA,SAAyBA,SAAS;;;;QAGlD/4B,iBAAAA,SAAe4lD;QACb,OAAO1jD,QAAQC,QAAQ,IAAI43D,GAAqBnU;OAGlD5lD,iBAAAA;QACE,OAAIg6D,GAA2BC,OACtB,IAAID,KAEJ,IAAIE;OAIfl6D,iBAAAA,SAAcsP;QACZ,OAAO,IAAI6qD,GAAoB7qD,GAAY;YAAE8qD;;OAG/Cp6D,iBAAAA,SAAW8B;QACT,OAAOsiC,KAAKC,UAAUviC;OAGxB9B,mBAAAA,SAAKq6D;QACH,OAAOzwD,KAAKywD;OAGdr6D,mBAAAA,SAAKs6D;QACH,OAAOvwD,KAAKuwD;OAGdt6D,iBAAAA,SAAYu6D;;QAIV,IAAMC;;QAEY,sBAATC,SAAyBA,KAAKD,UAAWC,KAAuBC,WACnE1zC,IAAQ,IAAI/c,WAAWswD;QAC7B,IAAIC,GACFA,EAAOG,gBAAgB3zC;;QAGvB,KAAK,IAAIrgB,IAAI,GAAGA,IAAI4zD,GAAQ5zD,KAC1BqgB,EAAMrgB,KAAK5B,KAAKC,MAAsB,MAAhBD,KAAKmyB;QAG/B,OAAOlQ;;OFrDXsyC,GAAkBv5D;;"} |