Close httplib2 connections.
delete(project, instance, database, x__xgafv=None)
Deletes a database from a Cloud SQL instance.
get(project, instance, database, x__xgafv=None)
Retrieves a resource containing information about a database inside a Cloud SQL instance.
insert(project, instance, body=None, x__xgafv=None)
Inserts a resource containing information about a database inside a Cloud SQL instance. **Note:** You can't modify the default character set and collation.
list(project, instance, x__xgafv=None)
Lists databases in the specified Cloud SQL instance.
patch(project, instance, database, body=None, x__xgafv=None)
Partially updates a resource containing information about a database inside a Cloud SQL instance. This method supports patch semantics.
update(project, instance, database, body=None, x__xgafv=None)
Updates a resource containing information about a database inside a Cloud SQL instance.
close()
Close httplib2 connections.
delete(project, instance, database, x__xgafv=None)
Deletes a database from a Cloud SQL instance. Args: project: string, Project ID of the project that contains the instance. (required) instance: string, Database instance ID. This does not include the project ID. (required) database: string, Name of the database to be deleted in the instance. (required) x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource. "acquireSsrsLeaseContext": { # Acquire SSRS lease context. # The context for acquire SSRS lease operation, if applicable. "duration": "A String", # Lease duration needed for SSRS setup. "reportDatabase": "A String", # The report database to be used for SSRS setup. "serviceLogin": "A String", # The username to be used as the service login to connect to the report database for SSRS setup. "setupLogin": "A String", # The username to be used as the setup login to connect to the database server for SSRS setup. }, "apiWarning": { # An Admin API warning message. # An Admin API warning message. "code": "A String", # Code to uniquely identify the warning type. "message": "A String", # The warning message. "region": "A String", # The region name for REGION_UNREACHABLE warning. }, "backupContext": { # Backup context. # The context for backup operation, if applicable. "backupId": "A String", # The identifier of the backup. "kind": "A String", # This is always `sql#backupContext`. }, "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated. "errors": [ # The list of errors encountered while processing this operation. 
{ # Database instance operation error. "code": "A String", # Identifies the specific error that occurred. "kind": "A String", # This is always `sql#operationError`. "message": "A String", # Additional information about the error encountered. }, ], "kind": "A String", # This is always `sql#operationErrors`. }, "exportContext": { # Database instance export context. # The context for export operation, if applicable. "bakExportOptions": { # Options for exporting BAK files (SQL Server-only) "bakType": "A String", # Type of this bak file will be export, FULL or DIFF, SQL Server only "copyOnly": True or False, # Deprecated: copy_only is deprecated. Use differential_base instead "differentialBase": True or False, # Whether or not the backup can be used as a differential base copy_only backup can not be served as differential base "exportLogEndTime": "A String", # Optional. The end timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs until current time will be included. Only applied to Cloud SQL for SQL Server. "exportLogStartTime": "A String", # Optional. The begin timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs from the beginning of retention period will be included. Only applied to Cloud SQL for SQL Server. "stripeCount": 42, # Option for specifying how many stripes to use for the export. If blank, and the value of the striped field is true, the number of stripes is automatically chosen. "striped": True or False, # Whether or not the export should be striped. }, "csvExportOptions": { # Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. 
"escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "selectQuery": "A String", # The select query used to extract the data. }, "databases": [ # Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is specified, all databases are exported, except for the `mysql` system database. If `fileType` is `CSV`, you can specify one database, either by using this property or by using the `csvExportOptions.selectQuery` property, which takes precedence over this property. `PostgreSQL instances:` You must specify one database to be exported. If `fileType` is `CSV`, this database must match the one specified in the `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database to be exported, and the `fileType` must be `BAK`. "A String", ], "fileType": "A String", # The file type for the specified uri. "kind": "A String", # This is always `sql#exportContext`. "offload": True or False, # Option for export offload. "sqlExportOptions": { # Options for exporting data as SQL statements. "mysqlExportOptions": { # Options for exporting from MySQL. "masterData": 42, # Option to include SQL statement required to set up replication. If set to `1`, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. }, "parallel": True or False, # Optional. 
Whether or not the export should be parallel. "postgresExportOptions": { # Options for exporting from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. Use this option to include DROP SQL statements. These statements are used to delete database objects before running the import operation. "ifExists": True or False, # Optional. Option to include an IF EXISTS SQL statement with each DROP statement produced by clean. }, "schemaOnly": True or False, # Export only schemas. "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table. "A String", ], "threads": 42, # Optional. The number of threads to use for parallel export. }, "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form `gs://bucketName/fileName`. If the file already exists, the request succeeds, but the operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are compressed. }, "importContext": { # Database instance import context. # The context for import operation, if applicable. "bakImportOptions": { # Import parameters specific to SQL Server .BAK files "bakType": "A String", # Type of the bak content, FULL or DIFF "encryptionOptions": { "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. "pvkPassword": "A String", # Password that encrypts the private key "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. 
}, "noRecovery": True or False, # Whether or not the backup importing will restore database with NORECOVERY option Applies only to Cloud SQL for SQL Server. "recoveryOnly": True or False, # Whether or not the backup importing request will just bring database online without downloading Bak content only one of "no_recovery" and "recovery_only" can be true otherwise error will return. Applies only to Cloud SQL for SQL Server. "stopAt": "A String", # Optional. The timestamp when the import should stop. This timestamp is in the [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`). This field is equivalent to the STOPAT keyword and applies to Cloud SQL for SQL Server only. "stopAtMark": "A String", # Optional. The marked transaction where the import should stop. This field is equivalent to the STOPATMARK keyword and applies to Cloud SQL for SQL Server only. "striped": True or False, # Whether or not the backup set being restored is striped. Applies only to Cloud SQL for SQL Server. }, "csvImportOptions": { # Options for importing data as CSV. "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data. "A String", ], "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "table": "A String", # The table to which CSV data is imported. }, "database": "A String", # The target database for the import. 
If `fileType` is `SQL`, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If `fileType` is `CSV`, one database must be specified. "fileType": "A String", # The file type for the specified uri. `SQL`: The file contains SQL statements. `CSV`: The file contains CSV data. "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only. "kind": "A String", # This is always `sql#importContext`. "sqlImportOptions": { # Optional. Options for importing data from SQL statements. "parallel": True or False, # Optional. Whether or not the import should be parallel. "postgresImportOptions": { # Optional. Options for importing from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. The --clean flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. "ifExists": True or False, # Optional. The --if-exists flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. }, "threads": 42, # Optional. The number of threads to use for parallel import. }, "uri": "A String", # Path to the import file in Cloud Storage, in the form `gs://bucketName/fileName`. Compressed gzip files (.gz) are supported when `fileType` is `SQL`. The instance must have write permissions to the bucket and read access to the file. }, "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "kind": "A String", # This is always `sql#operation`. "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation. "operationType": "A String", # The type of the operation. 
Valid values are: * `CREATE` * `DELETE` * `UPDATE` * `RESTART` * `IMPORT` * `EXPORT` * `BACKUP_VOLUME` * `RESTORE_VOLUME` * `CREATE_USER` * `DELETE_USER` * `CREATE_DATABASE` * `DELETE_DATABASE` "selfLink": "A String", # The URI of this resource. "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "status": "A String", # The status of an operation. "targetId": "A String", # Name of the database instance related to this operation. "targetLink": "A String", "targetProject": "A String", # The project ID of the target instance related to this operation. "user": "A String", # The email address of the user who initiated this operation. }
get(project, instance, database, x__xgafv=None)
Retrieves a resource containing information about a database inside a Cloud SQL instance. Args: project: string, Project ID of the project that contains the instance. (required) instance: string, Database instance ID. This does not include the project ID. (required) database: string, Name of the database in the instance. (required) x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # Represents a SQL database on the Cloud SQL instance. "charset": "A String", # The Cloud SQL charset value. "collation": "A String", # The Cloud SQL collation value. "etag": "A String", # This field is deprecated and will be removed from a future version of the API. "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID. "kind": "A String", # This is always `sql#database`. "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name. "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable. "selfLink": "A String", # The URI of this resource. "sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance. "compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible "recoveryModel": "A String", # The recovery model of a SQL Server database }, }
insert(project, instance, body=None, x__xgafv=None)
Inserts a resource containing information about a database inside a Cloud SQL instance. **Note:** You can't modify the default character set and collation. Args: project: string, Project ID of the project that contains the instance. (required) instance: string, Database instance ID. This does not include the project ID. (required) body: object, The request body. The object takes the form of: { # Represents a SQL database on the Cloud SQL instance. "charset": "A String", # The Cloud SQL charset value. "collation": "A String", # The Cloud SQL collation value. "etag": "A String", # This field is deprecated and will be removed from a future version of the API. "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID. "kind": "A String", # This is always `sql#database`. "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name. "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable. "selfLink": "A String", # The URI of this resource. "sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance. "compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible "recoveryModel": "A String", # The recovery model of a SQL Server database }, } x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource. "acquireSsrsLeaseContext": { # Acquire SSRS lease context. # The context for acquire SSRS lease operation, if applicable. "duration": "A String", # Lease duration needed for SSRS setup. "reportDatabase": "A String", # The report database to be used for SSRS setup. 
"serviceLogin": "A String", # The username to be used as the service login to connect to the report database for SSRS setup. "setupLogin": "A String", # The username to be used as the setup login to connect to the database server for SSRS setup. }, "apiWarning": { # An Admin API warning message. # An Admin API warning message. "code": "A String", # Code to uniquely identify the warning type. "message": "A String", # The warning message. "region": "A String", # The region name for REGION_UNREACHABLE warning. }, "backupContext": { # Backup context. # The context for backup operation, if applicable. "backupId": "A String", # The identifier of the backup. "kind": "A String", # This is always `sql#backupContext`. }, "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated. "errors": [ # The list of errors encountered while processing this operation. { # Database instance operation error. "code": "A String", # Identifies the specific error that occurred. "kind": "A String", # This is always `sql#operationError`. "message": "A String", # Additional information about the error encountered. }, ], "kind": "A String", # This is always `sql#operationErrors`. }, "exportContext": { # Database instance export context. # The context for export operation, if applicable. "bakExportOptions": { # Options for exporting BAK files (SQL Server-only) "bakType": "A String", # Type of this bak file will be export, FULL or DIFF, SQL Server only "copyOnly": True or False, # Deprecated: copy_only is deprecated. Use differential_base instead "differentialBase": True or False, # Whether or not the backup can be used as a differential base copy_only backup can not be served as differential base "exportLogEndTime": "A String", # Optional. 
The end timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs until current time will be included. Only applied to Cloud SQL for SQL Server. "exportLogStartTime": "A String", # Optional. The begin timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs from the beginning of retention period will be included. Only applied to Cloud SQL for SQL Server. "stripeCount": 42, # Option for specifying how many stripes to use for the export. If blank, and the value of the striped field is true, the number of stripes is automatically chosen. "striped": True or False, # Whether or not the export should be striped. }, "csvExportOptions": { # Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "selectQuery": "A String", # The select query used to extract the data. }, "databases": [ # Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is specified, all databases are exported, except for the `mysql` system database. If `fileType` is `CSV`, you can specify one database, either by using this property or by using the `csvExportOptions.selectQuery` property, which takes precedence over this property. 
`PostgreSQL instances:` You must specify one database to be exported. If `fileType` is `CSV`, this database must match the one specified in the `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database to be exported, and the `fileType` must be `BAK`. "A String", ], "fileType": "A String", # The file type for the specified uri. "kind": "A String", # This is always `sql#exportContext`. "offload": True or False, # Option for export offload. "sqlExportOptions": { # Options for exporting data as SQL statements. "mysqlExportOptions": { # Options for exporting from MySQL. "masterData": 42, # Option to include SQL statement required to set up replication. If set to `1`, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. }, "parallel": True or False, # Optional. Whether or not the export should be parallel. "postgresExportOptions": { # Options for exporting from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. Use this option to include DROP SQL statements. These statements are used to delete database objects before running the import operation. "ifExists": True or False, # Optional. Option to include an IF EXISTS SQL statement with each DROP statement produced by clean. }, "schemaOnly": True or False, # Export only schemas. "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table. "A String", ], "threads": 42, # Optional. The number of threads to use for parallel export. }, "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form `gs://bucketName/fileName`. 
If the file already exists, the request succeeds, but the operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are compressed. }, "importContext": { # Database instance import context. # The context for import operation, if applicable. "bakImportOptions": { # Import parameters specific to SQL Server .BAK files "bakType": "A String", # Type of the bak content, FULL or DIFF "encryptionOptions": { "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. "pvkPassword": "A String", # Password that encrypts the private key "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. }, "noRecovery": True or False, # Whether or not the backup importing will restore database with NORECOVERY option Applies only to Cloud SQL for SQL Server. "recoveryOnly": True or False, # Whether or not the backup importing request will just bring database online without downloading Bak content only one of "no_recovery" and "recovery_only" can be true otherwise error will return. Applies only to Cloud SQL for SQL Server. "stopAt": "A String", # Optional. The timestamp when the import should stop. This timestamp is in the [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`). This field is equivalent to the STOPAT keyword and applies to Cloud SQL for SQL Server only. "stopAtMark": "A String", # Optional. The marked transaction where the import should stop. This field is equivalent to the STOPATMARK keyword and applies to Cloud SQL for SQL Server only. "striped": True or False, # Whether or not the backup set being restored is striped. Applies only to Cloud SQL for SQL Server. 
}, "csvImportOptions": { # Options for importing data as CSV. "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data. "A String", ], "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "table": "A String", # The table to which CSV data is imported. }, "database": "A String", # The target database for the import. If `fileType` is `SQL`, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If `fileType` is `CSV`, one database must be specified. "fileType": "A String", # The file type for the specified uri. `SQL`: The file contains SQL statements. `CSV`: The file contains CSV data. "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only. "kind": "A String", # This is always `sql#importContext`. "sqlImportOptions": { # Optional. Options for importing data from SQL statements. "parallel": True or False, # Optional. Whether or not the import should be parallel. "postgresImportOptions": { # Optional. Options for importing from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. The --clean flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. "ifExists": True or False, # Optional. The --if-exists flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. 
}, "threads": 42, # Optional. The number of threads to use for parallel import. }, "uri": "A String", # Path to the import file in Cloud Storage, in the form `gs://bucketName/fileName`. Compressed gzip files (.gz) are supported when `fileType` is `SQL`. The instance must have write permissions to the bucket and read access to the file. }, "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "kind": "A String", # This is always `sql#operation`. "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation. "operationType": "A String", # The type of the operation. Valid values are: * `CREATE` * `DELETE` * `UPDATE` * `RESTART` * `IMPORT` * `EXPORT` * `BACKUP_VOLUME` * `RESTORE_VOLUME` * `CREATE_USER` * `DELETE_USER` * `CREATE_DATABASE` * `DELETE_DATABASE` "selfLink": "A String", # The URI of this resource. "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "status": "A String", # The status of an operation. "targetId": "A String", # Name of the database instance related to this operation. "targetLink": "A String", "targetProject": "A String", # The project ID of the target instance related to this operation. "user": "A String", # The email address of the user who initiated this operation. }
list(project, instance, x__xgafv=None)
Lists databases in the specified Cloud SQL instance. Args: project: string, Project ID of the project that contains the instance. (required) instance: string, Cloud SQL instance ID. This does not include the project ID. (required) x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # Database list response. "items": [ # List of database resources in the instance. { # Represents a SQL database on the Cloud SQL instance. "charset": "A String", # The Cloud SQL charset value. "collation": "A String", # The Cloud SQL collation value. "etag": "A String", # This field is deprecated and will be removed from a future version of the API. "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID. "kind": "A String", # This is always `sql#database`. "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name. "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable. "selfLink": "A String", # The URI of this resource. "sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance. "compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible "recoveryModel": "A String", # The recovery model of a SQL Server database }, }, ], "kind": "A String", # This is always `sql#databasesList`. }
patch(project, instance, database, body=None, x__xgafv=None)
Partially updates a resource containing information about a database inside a Cloud SQL instance. This method supports patch semantics. Args: project: string, Project ID of the project that contains the instance. (required) instance: string, Database instance ID. This does not include the project ID. (required) database: string, Name of the database to be updated in the instance. (required) body: object, The request body. The object takes the form of: { # Represents a SQL database on the Cloud SQL instance. "charset": "A String", # The Cloud SQL charset value. "collation": "A String", # The Cloud SQL collation value. "etag": "A String", # This field is deprecated and will be removed from a future version of the API. "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID. "kind": "A String", # This is always `sql#database`. "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name. "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable. "selfLink": "A String", # The URI of this resource. "sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance. "compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible "recoveryModel": "A String", # The recovery model of a SQL Server database }, } x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource. "acquireSsrsLeaseContext": { # Acquire SSRS lease context. # The context for acquire SSRS lease operation, if applicable. "duration": "A String", # Lease duration needed for SSRS setup. 
"reportDatabase": "A String", # The report database to be used for SSRS setup. "serviceLogin": "A String", # The username to be used as the service login to connect to the report database for SSRS setup. "setupLogin": "A String", # The username to be used as the setup login to connect to the database server for SSRS setup. }, "apiWarning": { # An Admin API warning message. # An Admin API warning message. "code": "A String", # Code to uniquely identify the warning type. "message": "A String", # The warning message. "region": "A String", # The region name for REGION_UNREACHABLE warning. }, "backupContext": { # Backup context. # The context for backup operation, if applicable. "backupId": "A String", # The identifier of the backup. "kind": "A String", # This is always `sql#backupContext`. }, "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated. "errors": [ # The list of errors encountered while processing this operation. { # Database instance operation error. "code": "A String", # Identifies the specific error that occurred. "kind": "A String", # This is always `sql#operationError`. "message": "A String", # Additional information about the error encountered. }, ], "kind": "A String", # This is always `sql#operationErrors`. }, "exportContext": { # Database instance export context. # The context for export operation, if applicable. "bakExportOptions": { # Options for exporting BAK files (SQL Server-only) "bakType": "A String", # Type of this bak file will be export, FULL or DIFF, SQL Server only "copyOnly": True or False, # Deprecated: copy_only is deprecated. 
Use differential_base instead "differentialBase": True or False, # Whether or not the backup can be used as a differential base copy_only backup can not be served as differential base "exportLogEndTime": "A String", # Optional. The end timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs until current time will be included. Only applied to Cloud SQL for SQL Server. "exportLogStartTime": "A String", # Optional. The begin timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs from the beginning of retention period will be included. Only applied to Cloud SQL for SQL Server. "stripeCount": 42, # Option for specifying how many stripes to use for the export. If blank, and the value of the striped field is true, the number of stripes is automatically chosen. "striped": True or False, # Whether or not the export should be striped. }, "csvExportOptions": { # Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "selectQuery": "A String", # The select query used to extract the data. }, "databases": [ # Databases to be exported. 
`MySQL instances:` If `fileType` is `SQL` and no database is specified, all databases are exported, except for the `mysql` system database. If `fileType` is `CSV`, you can specify one database, either by using this property or by using the `csvExportOptions.selectQuery` property, which takes precedence over this property. `PostgreSQL instances:` You must specify one database to be exported. If `fileType` is `CSV`, this database must match the one specified in the `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database to be exported, and the `fileType` must be `BAK`. "A String", ], "fileType": "A String", # The file type for the specified uri. "kind": "A String", # This is always `sql#exportContext`. "offload": True or False, # Option for export offload. "sqlExportOptions": { # Options for exporting data as SQL statements. "mysqlExportOptions": { # Options for exporting from MySQL. "masterData": 42, # Option to include SQL statement required to set up replication. If set to `1`, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. }, "parallel": True or False, # Optional. Whether or not the export should be parallel. "postgresExportOptions": { # Options for exporting from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. Use this option to include DROP SQL statements. These statements are used to delete database objects before running the import operation. "ifExists": True or False, # Optional. Option to include an IF EXISTS SQL statement with each DROP statement produced by clean. }, "schemaOnly": True or False, # Export only schemas. "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. 
For PostgreSQL instances, you can specify only one table. "A String", ], "threads": 42, # Optional. The number of threads to use for parallel export. }, "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form `gs://bucketName/fileName`. If the file already exists, the request succeeds, but the operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are compressed. }, "importContext": { # Database instance import context. # The context for import operation, if applicable. "bakImportOptions": { # Import parameters specific to SQL Server .BAK files "bakType": "A String", # Type of the bak content, FULL or DIFF "encryptionOptions": { "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. "pvkPassword": "A String", # Password that encrypts the private key "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. }, "noRecovery": True or False, # Whether or not the backup importing will restore database with NORECOVERY option Applies only to Cloud SQL for SQL Server. "recoveryOnly": True or False, # Whether or not the backup importing request will just bring database online without downloading Bak content only one of "no_recovery" and "recovery_only" can be true otherwise error will return. Applies only to Cloud SQL for SQL Server. "stopAt": "A String", # Optional. The timestamp when the import should stop. This timestamp is in the [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`). This field is equivalent to the STOPAT keyword and applies to Cloud SQL for SQL Server only. "stopAtMark": "A String", # Optional. 
The marked transaction where the import should stop. This field is equivalent to the STOPATMARK keyword and applies to Cloud SQL for SQL Server only. "striped": True or False, # Whether or not the backup set being restored is striped. Applies only to Cloud SQL for SQL Server. }, "csvImportOptions": { # Options for importing data as CSV. "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data. "A String", ], "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "table": "A String", # The table to which CSV data is imported. }, "database": "A String", # The target database for the import. If `fileType` is `SQL`, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If `fileType` is `CSV`, one database must be specified. "fileType": "A String", # The file type for the specified uri. `SQL`: The file contains SQL statements. `CSV`: The file contains CSV data. "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only. "kind": "A String", # This is always `sql#importContext`. "sqlImportOptions": { # Optional. Options for importing data from SQL statements. "parallel": True or False, # Optional. Whether or not the import should be parallel. "postgresImportOptions": { # Optional. Options for importing from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. 
The --clean flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. "ifExists": True or False, # Optional. The --if-exists flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. }, "threads": 42, # Optional. The number of threads to use for parallel import. }, "uri": "A String", # Path to the import file in Cloud Storage, in the form `gs://bucketName/fileName`. Compressed gzip files (.gz) are supported when `fileType` is `SQL`. The instance must have write permissions to the bucket and read access to the file. }, "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "kind": "A String", # This is always `sql#operation`. "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation. "operationType": "A String", # The type of the operation. Valid values are: * `CREATE` * `DELETE` * `UPDATE` * `RESTART` * `IMPORT` * `EXPORT` * `BACKUP_VOLUME` * `RESTORE_VOLUME` * `CREATE_USER` * `DELETE_USER` * `CREATE_DATABASE` * `DELETE_DATABASE` "selfLink": "A String", # The URI of this resource. "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "status": "A String", # The status of an operation. "targetId": "A String", # Name of the database instance related to this operation. "targetLink": "A String", "targetProject": "A String", # The project ID of the target instance related to this operation. "user": "A String", # The email address of the user who initiated this operation. }
update(project, instance, database, body=None, x__xgafv=None)
Updates a resource containing information about a database inside a Cloud SQL instance. Args: project: string, Project ID of the project that contains the instance. (required) instance: string, Database instance ID. This does not include the project ID. (required) database: string, Name of the database to be updated in the instance. (required) body: object, The request body. The object takes the form of: { # Represents a SQL database on the Cloud SQL instance. "charset": "A String", # The Cloud SQL charset value. "collation": "A String", # The Cloud SQL collation value. "etag": "A String", # This field is deprecated and will be removed from a future version of the API. "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID. "kind": "A String", # This is always `sql#database`. "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name. "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable. "selfLink": "A String", # The URI of this resource. "sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance. "compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible "recoveryModel": "A String", # The recovery model of a SQL Server database }, } x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource. "acquireSsrsLeaseContext": { # Acquire SSRS lease context. # The context for acquire SSRS lease operation, if applicable. "duration": "A String", # Lease duration needed for SSRS setup. "reportDatabase": "A String", # The report database to be used for SSRS setup. 
"serviceLogin": "A String", # The username to be used as the service login to connect to the report database for SSRS setup. "setupLogin": "A String", # The username to be used as the setup login to connect to the database server for SSRS setup. }, "apiWarning": { # An Admin API warning message. # An Admin API warning message. "code": "A String", # Code to uniquely identify the warning type. "message": "A String", # The warning message. "region": "A String", # The region name for REGION_UNREACHABLE warning. }, "backupContext": { # Backup context. # The context for backup operation, if applicable. "backupId": "A String", # The identifier of the backup. "kind": "A String", # This is always `sql#backupContext`. }, "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated. "errors": [ # The list of errors encountered while processing this operation. { # Database instance operation error. "code": "A String", # Identifies the specific error that occurred. "kind": "A String", # This is always `sql#operationError`. "message": "A String", # Additional information about the error encountered. }, ], "kind": "A String", # This is always `sql#operationErrors`. }, "exportContext": { # Database instance export context. # The context for export operation, if applicable. "bakExportOptions": { # Options for exporting BAK files (SQL Server-only) "bakType": "A String", # Type of this bak file will be export, FULL or DIFF, SQL Server only "copyOnly": True or False, # Deprecated: copy_only is deprecated. Use differential_base instead "differentialBase": True or False, # Whether or not the backup can be used as a differential base copy_only backup can not be served as differential base "exportLogEndTime": "A String", # Optional. 
The end timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs until current time will be included. Only applied to Cloud SQL for SQL Server. "exportLogStartTime": "A String", # Optional. The begin timestamp when transaction log will be included in the export operation. [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in UTC. When omitted, all available logs from the beginning of retention period will be included. Only applied to Cloud SQL for SQL Server. "stripeCount": 42, # Option for specifying how many stripes to use for the export. If blank, and the value of the striped field is true, the number of stripes is automatically chosen. "striped": True or False, # Whether or not the export should be striped. }, "csvExportOptions": { # Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "selectQuery": "A String", # The select query used to extract the data. }, "databases": [ # Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is specified, all databases are exported, except for the `mysql` system database. If `fileType` is `CSV`, you can specify one database, either by using this property or by using the `csvExportOptions.selectQuery` property, which takes precedence over this property. 
`PostgreSQL instances:` You must specify one database to be exported. If `fileType` is `CSV`, this database must match the one specified in the `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database to be exported, and the `fileType` must be `BAK`. "A String", ], "fileType": "A String", # The file type for the specified uri. "kind": "A String", # This is always `sql#exportContext`. "offload": True or False, # Option for export offload. "sqlExportOptions": { # Options for exporting data as SQL statements. "mysqlExportOptions": { # Options for exporting from MySQL. "masterData": 42, # Option to include SQL statement required to set up replication. If set to `1`, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. }, "parallel": True or False, # Optional. Whether or not the export should be parallel. "postgresExportOptions": { # Options for exporting from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. Use this option to include DROP SQL statements. These statements are used to delete database objects before running the import operation. "ifExists": True or False, # Optional. Option to include an IF EXISTS SQL statement with each DROP statement produced by clean. }, "schemaOnly": True or False, # Export only schemas. "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table. "A String", ], "threads": 42, # Optional. The number of threads to use for parallel export. }, "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form `gs://bucketName/fileName`. 
If the file already exists, the request succeeds, but the operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are compressed. }, "importContext": { # Database instance import context. # The context for import operation, if applicable. "bakImportOptions": { # Import parameters specific to SQL Server .BAK files "bakType": "A String", # Type of the bak content, FULL or DIFF "encryptionOptions": { "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. "pvkPassword": "A String", # Password that encrypts the private key "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file. }, "noRecovery": True or False, # Whether or not the backup importing will restore database with NORECOVERY option Applies only to Cloud SQL for SQL Server. "recoveryOnly": True or False, # Whether or not the backup importing request will just bring database online without downloading Bak content only one of "no_recovery" and "recovery_only" can be true otherwise error will return. Applies only to Cloud SQL for SQL Server. "stopAt": "A String", # Optional. The timestamp when the import should stop. This timestamp is in the [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`). This field is equivalent to the STOPAT keyword and applies to Cloud SQL for SQL Server only. "stopAtMark": "A String", # Optional. The marked transaction where the import should stop. This field is equivalent to the STOPATMARK keyword and applies to Cloud SQL for SQL Server only. "striped": True or False, # Whether or not the backup set being restored is striped. Applies only to Cloud SQL for SQL Server. 
}, "csvImportOptions": { # Options for importing data as CSV. "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data. "A String", ], "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped. "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file. "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values. "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted. "table": "A String", # The table to which CSV data is imported. }, "database": "A String", # The target database for the import. If `fileType` is `SQL`, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If `fileType` is `CSV`, one database must be specified. "fileType": "A String", # The file type for the specified uri. `SQL`: The file contains SQL statements. `CSV`: The file contains CSV data. "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only. "kind": "A String", # This is always `sql#importContext`. "sqlImportOptions": { # Optional. Options for importing data from SQL statements. "parallel": True or False, # Optional. Whether or not the import should be parallel. "postgresImportOptions": { # Optional. Options for importing from a Cloud SQL for PostgreSQL instance. "clean": True or False, # Optional. The --clean flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. "ifExists": True or False, # Optional. The --if-exists flag for the pg_restore utility. This flag applies only if you enabled Cloud SQL to import files in parallel. 
}, "threads": 42, # Optional. The number of threads to use for parallel import. }, "uri": "A String", # Path to the import file in Cloud Storage, in the form `gs://bucketName/fileName`. Compressed gzip files (.gz) are supported when `fileType` is `SQL`. The instance must have write permissions to the bucket and read access to the file. }, "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "kind": "A String", # This is always `sql#operation`. "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation. "operationType": "A String", # The type of the operation. Valid values are: * `CREATE` * `DELETE` * `UPDATE` * `RESTART` * `IMPORT` * `EXPORT` * `BACKUP_VOLUME` * `RESTORE_VOLUME` * `CREATE_USER` * `DELETE_USER` * `CREATE_DATABASE` * `DELETE_DATABASE` "selfLink": "A String", # The URI of this resource. "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example `2012-11-15T16:19:00.094Z`. "status": "A String", # The status of an operation. "targetId": "A String", # Name of the database instance related to this operation. "targetLink": "A String", "targetProject": "A String", # The project ID of the target instance related to this operation. "user": "A String", # The email address of the user who initiated this operation. }