Fix bad migration state that could happen during a device transfer.
parent 63ce2de3bf
commit 8c707555f2

13 changed files with 549 additions and 9 deletions
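The shape of the fix, as described in the V188 migration comments below: when a migration needs a "self" recipient that does not exist yet (e.g. mid backup restore on a transferred device), it writes a placeholder recipient with the reserved id -2, and once the real self recipient is created the placeholder references are rewritten and the placeholder row is removed. A minimal sketch of that lifecycle with simplified stand-ins for the real tables (the actual logic lives in V188_FixMessageRecipientsAndEditMessageMigration, RecipientTable.updatePendingSelfData, and MessageTable.updatePendingSelfData in the diff below):

// Sketch only: simplified stand-ins for the real tables, to show the placeholder lifecycle.
const val PLACEHOLDER_SELF_ID = -2L

data class Message(val id: Long, var fromRecipientId: Long, var toRecipientId: Long)

class FakeDb {
  val recipients = mutableMapOf<Long, String>() // id -> debug label
  val messages = mutableListOf<Message>()
}

// Step 1 (during the migration, no self known yet): write the placeholder.
fun migrateWithPlaceholder(db: FakeDb) {
  db.recipients[PLACEHOLDER_SELF_ID] = "placeholder-self"
  db.messages += Message(id = 1, fromRecipientId = PLACEHOLDER_SELF_ID, toRecipientId = 7)
}

// Step 2 (after registration creates the real self): rewrite references, drop the placeholder.
fun updatePendingSelfData(db: FakeDb, selfId: Long) {
  db.messages.forEach { m ->
    if (m.fromRecipientId == PLACEHOLDER_SELF_ID) m.fromRecipientId = selfId
    if (m.toRecipientId == PLACEHOLDER_SELF_ID) m.toRecipientId = selfId
  }
  db.recipients.remove(PLACEHOLDER_SELF_ID)
}

fun main() {
  val db = FakeDb()
  migrateWithPlaceholder(db)
  db.recipients[42] = "real-self"
  updatePendingSelfData(db, selfId = 42)
  println(db.messages) // [Message(id=1, fromRecipientId=42, toRecipientId=7)]
}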
@@ -835,9 +835,13 @@ class CallTable(context: Context, databaseHelper: SignalDatabase) : DatabaseTabl
      """
      SqlUtil.buildQuery(selection, 0, 0, glob, glob, glob, glob)
    } else {
-     SqlUtil.buildQuery("""
+     SqlUtil.buildQuery(
+       """
        ${RecipientTable.TABLE_NAME}.${RecipientTable.BLOCKED} = ? AND ${RecipientTable.TABLE_NAME}.${RecipientTable.HIDDEN} = ?
-       """, 0, 0)
+       """,
+       0,
+       0
+     )
    }

    val offsetLimit = if (limit > 0) {
@@ -4657,6 +4657,22 @@ open class MessageTable(context: Context?, databaseHelper: SignalDatabase) : Dat
      .run()
  }

  fun updatePendingSelfData(placeholder: RecipientId, self: RecipientId) {
    val fromUpdates = writableDatabase
      .update(TABLE_NAME)
      .values(FROM_RECIPIENT_ID to self.serialize())
      .where("$FROM_RECIPIENT_ID = ?", placeholder)
      .run()

    val toUpdates = writableDatabase
      .update(TABLE_NAME)
      .values(TO_RECIPIENT_ID to self.serialize())
      .where("$TO_RECIPIENT_ID = ?", placeholder)
      .run()

    Log.i(TAG, "Updated $fromUpdates FROM_RECIPIENT_ID rows and $toUpdates TO_RECIPIENT_ID rows.")
  }

  private fun getStickyWherePartForParentStoryId(parentStoryId: Long?): String {
    return if (parentStoryId == null) {
      " AND $PARENT_STORY_ID <= 0"
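The two fluent updates above boil down to a pair of plain SQL statements; a rough equivalent sketch, assuming the placeholder id is -2 (PLACEHOLDER_SELF_ID in RecipientTable below) and using the literal table/column names behind the Kotlin constants:

// Sketch only: roughly what MessageTable.updatePendingSelfData executes.
fun pendingSelfDataSql(selfId: Long): List<String> = listOf(
  "UPDATE message SET from_recipient_id = $selfId WHERE from_recipient_id = -2",
  "UPDATE message SET to_recipient_id = $selfId WHERE to_recipient_id = -2"
)

Each statement's changed-row count is what the new code logs as fromUpdates and toUpdates.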
@@ -14,6 +14,7 @@ import net.zetetic.database.sqlcipher.SQLiteConstraintException
import org.signal.core.util.Bitmask
import org.signal.core.util.CursorUtil
import org.signal.core.util.SqlUtil
import org.signal.core.util.delete
import org.signal.core.util.exists
import org.signal.core.util.logging.Log
import org.signal.core.util.optionalBlob
@@ -400,6 +401,9 @@ open class RecipientTable(context: Context, databaseHelper: SignalDatabase) : Da
        $TABLE_NAME.$HIDDEN = 0
      ORDER BY ${ThreadTable.TABLE_NAME}.${ThreadTable.DATE} DESC LIMIT 50
    """

    /** Used as a placeholder recipient for self during migrations when self isn't yet available. */
    private val PLACEHOLDER_SELF_ID = -2L
  }

  fun getByE164(e164: String): Optional<RecipientId> {
@@ -2013,7 +2017,8 @@ open class RecipientTable(context: Context, databaseHelper: SignalDatabase) : Da
   * Associates the provided IDs together. The assumption here is that all of the IDs correspond to the local user and have been verified.
   */
  fun linkIdsForSelf(aci: ACI, pni: PNI, e164: String) {
-   getAndPossiblyMerge(serviceId = aci, pni = pni, e164 = e164, changeSelf = true, pniVerified = true)
+   val id: RecipientId = getAndPossiblyMerge(serviceId = aci, pni = pni, e164 = e164, changeSelf = true, pniVerified = true)
+   updatePendingSelfData(id)
  }

  /**
@@ -3910,6 +3915,25 @@ open class RecipientTable(context: Context, databaseHelper: SignalDatabase) : Da
    }
  }

  /**
   * Should be called immediately after we create a recipient for self.
   * This clears up any placeholders we put in the database for the local user, which is typically only done in database migrations.
   */
  fun updatePendingSelfData(selfId: RecipientId) {
    SignalDatabase.messages.updatePendingSelfData(RecipientId.from(PLACEHOLDER_SELF_ID), selfId)

    val deletes = writableDatabase
      .delete(TABLE_NAME)
      .where("$ID = ?", PLACEHOLDER_SELF_ID)
      .run()

    if (deletes > 0) {
      Log.w(TAG, "Deleted a PLACEHOLDER_SELF from the table.")
    } else {
      Log.i(TAG, "No PLACEHOLDER_SELF in the table.")
    }
  }

  /**
   * Should only be used for debugging! A very destructive action that clears all known serviceIds from people with phone numbers (so that we could eventually
   * get them back through CDS).
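Note the ordering inside updatePendingSelfData: the message rows are rewritten before the placeholder recipient row is deleted, which matters because message.from_recipient_id and message.to_recipient_id reference recipient (_id) with ON DELETE CASCADE (see the V188 schema later in this commit). A minimal sketch of that ordering, using hypothetical stand-in interfaces rather than the real tables:

// Sketch only: hypothetical stand-ins to illustrate why the rewrite happens before the delete.
interface MessageTableLike { fun updatePendingSelfData(placeholder: Long, self: Long) }
interface RecipientTableLike { fun deleteRecipient(id: Long) }

fun clearPlaceholderSelf(messages: MessageTableLike, recipients: RecipientTableLike, selfId: Long) {
  messages.updatePendingSelfData(placeholder = -2L, self = selfId) // re-point messages at the real self first
  recipients.deleteRecipient(id = -2L)                             // only then drop the placeholder row
}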
@@ -169,11 +169,11 @@ open class SignalDatabase(private val context: Application, databaseSecret: Data
        db.setTransactionSuccessful()
      } finally {
        db.endTransaction()
+
+       // We have to re-begin the transaction for the calling code (see comment at start of method)
+       db.beginTransaction()
      }
-
-     // We have to re-begin the transaction for the calling code (see comment at start of method)
-     db.beginTransaction()

      migratePostTransaction(context, oldVersion)
      Log.i(TAG, "Upgrade complete. Took " + (System.currentTimeMillis() - startTime) + " ms.")
    }
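The effect of the move above: endTransaction() and the follow-up beginTransaction() now sit in the same finally block, so the calling code gets its expected open transaction back even if a migration throws. A minimal sketch of the pattern, assuming a SQLiteDatabase-style handle (the SQLCipher class used here exposes the same transaction API, as the diff itself shows):

import android.database.sqlite.SQLiteDatabase

// Sketch only: whatever happens inside the try, the caller is handed back an open
// transaction, because the re-begin lives in the same finally block as endTransaction().
fun <T> withOwnTransaction(db: SQLiteDatabase, block: () -> T): T {
  db.beginTransaction()
  return try {
    val result = block()
    db.setTransactionSuccessful()
    result
  } finally {
    db.endTransaction()
    db.beginTransaction() // re-begin for the calling code, as in the diff above
  }
}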
@@ -43,6 +43,7 @@ import org.thoughtcrime.securesms.database.helpers.migration.V184_CallLinkReplac
import org.thoughtcrime.securesms.database.helpers.migration.V185_MessageRecipientsAndEditMessageMigration
import org.thoughtcrime.securesms.database.helpers.migration.V186_ForeignKeyIndicesMigration
import org.thoughtcrime.securesms.database.helpers.migration.V187_MoreForeignKeyIndexesMigration
import org.thoughtcrime.securesms.database.helpers.migration.V188_FixMessageRecipientsAndEditMessageMigration

/**
 * Contains all of the database migrations for [SignalDatabase]. Broken into a separate file for cleanliness.
@@ -51,7 +52,7 @@ object SignalDatabaseMigrations {

  val TAG: String = Log.tag(SignalDatabaseMigrations.javaClass)

- const val DATABASE_VERSION = 187
+ const val DATABASE_VERSION = 188

  @JvmStatic
  fun migrate(context: Application, db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
@@ -210,6 +211,10 @@ object SignalDatabaseMigrations {
    if (oldVersion < 187) {
      V187_MoreForeignKeyIndexesMigration.migrate(context, db, oldVersion, newVersion)
    }

    if (oldVersion < 188) {
      V188_FixMessageRecipientsAndEditMessageMigration.migrate(context, db, oldVersion, newVersion)
    }
  }

  @JvmStatic
@@ -4,6 +4,8 @@ import android.app.Application
import net.zetetic.database.sqlcipher.SQLiteDatabase
import org.signal.core.util.Stopwatch
import org.signal.core.util.logging.Log
import org.signal.core.util.readToList
import org.signal.core.util.requireNonNullString

/**
 * We added some foreign keys to the message table (particularly on original_message_id and latest_revision_id)
@@ -22,6 +24,13 @@ object V186_ForeignKeyIndicesMigration : SignalDatabaseMigration {
  private val TAG = Log.tag(V186_ForeignKeyIndicesMigration::class.java)

  override fun migrate(context: Application, db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
    // This was added after a bug was found in V185 that resulted in it not being run for users who restored from backup.
    // In that case, this column would be missing, and the migration would fail. This is cleaned up in V188.
    if (!columnExists(db, "message", "from_recipient_id")) {
      Log.w(TAG, "V185 wasn't run successfully! Skipping the migration for now. It'll run in V188.")
      return
    }

    val stopwatch = Stopwatch("migration")

    db.execSQL("CREATE INDEX IF NOT EXISTS message_original_message_id_index ON message (original_message_id)")
@@ -51,4 +60,10 @@ object V186_ForeignKeyIndicesMigration : SignalDatabaseMigration {

    stopwatch.stop(TAG)
  }

  private fun columnExists(db: SQLiteDatabase, table: String, column: String): Boolean {
    return db.query("PRAGMA table_info($table)", null)
      .readToList { it.requireNonNullString("name") }
      .any { it == column }
  }
}
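PRAGMA table_info returns one row per column of the table (cid, name, type, notnull, dflt_value, pk), so the guard above reduces to scanning the name field. A minimal standalone sketch of the same check outside Android, assuming the org.xerial sqlite-jdbc driver on the classpath purely for illustration:

import java.sql.DriverManager

// Sketch only: the same "does this column exist yet?" check used by V186 and V188,
// written against plain JDBC SQLite instead of the app's SQLCipher helpers.
fun columnExists(jdbcUrl: String, table: String, column: String): Boolean {
  DriverManager.getConnection(jdbcUrl).use { connection ->
    connection.createStatement().use { statement ->
      statement.executeQuery("PRAGMA table_info($table)").use { rows ->
        while (rows.next()) {
          if (rows.getString("name") == column) return true
        }
      }
    }
  }
  return false
}

// e.g. columnExists("jdbc:sqlite:some-local-copy.db", "message", "from_recipient_id")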
@@ -0,0 +1,354 @@
package org.thoughtcrime.securesms.database.helpers.migration

import android.app.Application
import android.preference.PreferenceManager
import androidx.core.content.contentValuesOf
import net.zetetic.database.sqlcipher.SQLiteDatabase
import org.signal.core.util.SqlUtil
import org.signal.core.util.Stopwatch
import org.signal.core.util.logging.Log
import org.signal.core.util.readToList
import org.signal.core.util.readToSingleInt
import org.signal.core.util.readToSingleObject
import org.signal.core.util.requireLong
import org.signal.core.util.requireNonNullString
import org.signal.core.util.requireString
import org.thoughtcrime.securesms.database.KeyValueDatabase
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies
import org.thoughtcrime.securesms.recipients.RecipientId
import org.whispersystems.signalservice.api.push.ACI

/**
 * This is a fix for a bad situation that could happen during [V185_MessageRecipientsAndEditMessageMigration].
 * That migration required the concept of a "self" in order to do the migration. This was all well and good
 * for an account that had already registered.
 *
 * But for people who had restored a backup and needed to run this migration afterwards as part of the restore
 * process, they were restoring messages without any notion of a self. And migration just sort of ignored that
 * case. And so those users are now stuck in a situation where all of their to/from addresses are messed up.
 *
 * To start, if those users finished registration, we should just be able to run V185 on them again, and it
 * should just work out for them.
 *
 * But for people who are hitting this migration during a backup restore, we need to run this migration without
 * the concept of a self. To do that, we're going to create a placeholder for self with a special ID (-2), and then
 * we're going to replace that ID with the true self after it's been created.
 */
object V188_FixMessageRecipientsAndEditMessageMigration : SignalDatabaseMigration {

  private val TAG = Log.tag(V188_FixMessageRecipientsAndEditMessageMigration::class.java)

  private val outgoingClause = "(" + listOf(21, 23, 22, 24, 25, 26, 2, 11)
    .map { "type & ${0x1F} = $it" }
    .joinToString(separator = " OR ") + ")"

  private const val PLACEHOLDER_ID = -2L

  override fun migrate(context: Application, db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
    if (columnExists(db, "message", "from_recipient_id")) {
      Log.i(TAG, "Already performed the migration! No need to do this.")
      return
    }

    Log.w(TAG, "Detected that V185 wasn't run properly! Repairing.")

    val stopwatch = Stopwatch("migration")

    var selfId: RecipientId? = getSelfId(db)

    if (selfId == null) {
      val outgoingMessageCount = db.rawQuery("SELECT COUNT(*) FROM message WHERE $outgoingClause").readToSingleInt()
      if (outgoingMessageCount == 0) {
        Log.i(TAG, "Could not find ourselves in the DB! Assuming this is an install that hasn't been registered yet.")
      } else {
        Log.w(TAG, "There's outgoing messages, but no self recipient! Attempting to repair.")

        val localAci: ACI? = getLocalAci(context)
        val localE164: String? = getLocalE164(context)

        if (localAci != null || localE164 != null) {
          Log.w(TAG, "Inserting a recipient for our local data.")
          val contentValues = contentValuesOf(
            "uuid" to localAci.toString(),
            "number" to localE164
          )

          val id = db.insert("recipient", null, contentValues)
          selfId = RecipientId.from(id)
        } else {
          Log.w(TAG, "No local recipient data at all! This must be after a backup-restore. Using a placeholder recipient.")
          db.insert("recipient", null, contentValuesOf("_id" to PLACEHOLDER_ID))
          selfId = RecipientId.from(PLACEHOLDER_ID)
        }
      }
    } else {
      Log.i(TAG, "Was able to find a selfId -- must have registered in-between.")
    }

    stopwatch.split("get-self")

    val dependentItems: List<SqlItem> = getAllDependentItems(db, "message")

    dependentItems.forEach { item ->
      val sql = "DROP ${item.type} IF EXISTS ${item.name}"
      Log.d(TAG, "Executing: $sql")
      db.execSQL(sql)
    }

    stopwatch.split("drop-dependents")

    db.execSQL(
      """
      CREATE TABLE message_tmp (
        _id INTEGER PRIMARY KEY AUTOINCREMENT,
        date_sent INTEGER NOT NULL,
        date_received INTEGER NOT NULL,
        date_server INTEGER DEFAULT -1,
        thread_id INTEGER NOT NULL REFERENCES thread (_id) ON DELETE CASCADE,
        from_recipient_id INTEGER NOT NULL REFERENCES recipient (_id) ON DELETE CASCADE,
        from_device_id INTEGER,
        to_recipient_id INTEGER NOT NULL REFERENCES recipient (_id) ON DELETE CASCADE,
        type INTEGER NOT NULL,
        body TEXT,
        read INTEGER DEFAULT 0,
        ct_l TEXT,
        exp INTEGER,
        m_type INTEGER,
        m_size INTEGER,
        st INTEGER,
        tr_id TEXT,
        subscription_id INTEGER DEFAULT -1,
        receipt_timestamp INTEGER DEFAULT -1,
        delivery_receipt_count INTEGER DEFAULT 0,
        read_receipt_count INTEGER DEFAULT 0,
        viewed_receipt_count INTEGER DEFAULT 0,
        mismatched_identities TEXT DEFAULT NULL,
        network_failures TEXT DEFAULT NULL,
        expires_in INTEGER DEFAULT 0,
        expire_started INTEGER DEFAULT 0,
        notified INTEGER DEFAULT 0,
        quote_id INTEGER DEFAULT 0,
        quote_author INTEGER DEFAULT 0,
        quote_body TEXT DEFAULT NULL,
        quote_missing INTEGER DEFAULT 0,
        quote_mentions BLOB DEFAULT NULL,
        quote_type INTEGER DEFAULT 0,
        shared_contacts TEXT DEFAULT NULL,
        unidentified INTEGER DEFAULT 0,
        link_previews TEXT DEFAULT NULL,
        view_once INTEGER DEFAULT 0,
        reactions_unread INTEGER DEFAULT 0,
        reactions_last_seen INTEGER DEFAULT -1,
        remote_deleted INTEGER DEFAULT 0,
        mentions_self INTEGER DEFAULT 0,
        notified_timestamp INTEGER DEFAULT 0,
        server_guid TEXT DEFAULT NULL,
        message_ranges BLOB DEFAULT NULL,
        story_type INTEGER DEFAULT 0,
        parent_story_id INTEGER DEFAULT 0,
        export_state BLOB DEFAULT NULL,
        exported INTEGER DEFAULT 0,
        scheduled_date INTEGER DEFAULT -1,
        latest_revision_id INTEGER DEFAULT NULL REFERENCES message (_id) ON DELETE CASCADE,
        original_message_id INTEGER DEFAULT NULL REFERENCES message (_id) ON DELETE CASCADE,
        revision_number INTEGER DEFAULT 0
      )
      """
    )
    stopwatch.split("create-table")

    db.execSQL(
      """
      INSERT INTO message_tmp
      SELECT
        _id,
        date_sent,
        date_received,
        date_server,
        thread_id,
        recipient_id,
        recipient_device_id,
        recipient_id,
        type,
        body,
        read,
        ct_l,
        exp,
        m_type,
        m_size,
        st,
        tr_id,
        subscription_id,
        receipt_timestamp,
        delivery_receipt_count,
        read_receipt_count,
        viewed_receipt_count,
        mismatched_identities,
        network_failures,
        expires_in,
        expire_started,
        notified,
        quote_id,
        quote_author,
        quote_body,
        quote_missing,
        quote_mentions,
        quote_type,
        shared_contacts,
        unidentified,
        link_previews,
        view_once,
        reactions_unread,
        reactions_last_seen,
        remote_deleted,
        mentions_self,
        notified_timestamp,
        server_guid,
        message_ranges,
        story_type,
        parent_story_id,
        export_state,
        exported,
        scheduled_date,
        NULL AS latest_revision_id,
        NULL AS original_message_id,
        0 as revision_number
      FROM message
      """
    )
    stopwatch.split("copy-data")

    // Previously, the recipient_id on an outgoing message represented who it was going to (an individual or group).
    // So if a message is outgoing, we'll set to = from, then from = self
    if (selfId != null) {
      db.execSQL(
        """
        UPDATE message_tmp
        SET
          to_recipient_id = from_recipient_id,
          from_recipient_id = ${selfId.toLong()},
          from_device_id = 1
        WHERE $outgoingClause
        """
      )
    }
    stopwatch.split("update-data")

    db.execSQL("DROP TABLE message")
    stopwatch.split("drop-old-table")

    db.execSQL("ALTER TABLE message_tmp RENAME TO message")
    stopwatch.split("rename-table")

    dependentItems.forEach { item ->
      val sql = when (item.name) {
        "mms_thread_story_parent_story_scheduled_date_index" -> "CREATE INDEX message_thread_story_parent_story_scheduled_date_latest_revision_id_index ON message (thread_id, date_received, story_type, parent_story_id, scheduled_date, latest_revision_id)"
        "mms_quote_id_quote_author_scheduled_date_index" -> "CREATE INDEX message_quote_id_quote_author_scheduled_date_latest_revision_id_index ON message (quote_id, quote_author, scheduled_date, latest_revision_id)"
        "mms_date_sent_index" -> "CREATE INDEX message_date_sent_from_to_thread_index ON message (date_sent, from_recipient_id, to_recipient_id, thread_id)"
        else -> item.createStatement.replace(Regex.fromLiteral("CREATE INDEX mms_"), "CREATE INDEX message_")
      }
      Log.d(TAG, "Executing: $sql")
      db.execSQL(sql)
    }
    stopwatch.split("recreate-dependents")

    // These are the indexes that should have been created in V186 -- conditionally done here in case it didn't run properly
    db.execSQL("CREATE INDEX IF NOT EXISTS message_original_message_id_index ON message (original_message_id)")
    db.execSQL("CREATE INDEX IF NOT EXISTS message_latest_revision_id_index ON message (latest_revision_id)")
    db.execSQL("CREATE INDEX IF NOT EXISTS message_from_recipient_id_index ON message (from_recipient_id)")
    db.execSQL("CREATE INDEX IF NOT EXISTS message_to_recipient_id_index ON message (to_recipient_id)")
    db.execSQL("CREATE INDEX IF NOT EXISTS reaction_author_id_index ON reaction (author_id)")
    db.execSQL("DROP INDEX IF EXISTS message_quote_id_quote_author_scheduled_date_index")
    db.execSQL("CREATE INDEX IF NOT EXISTS message_quote_id_quote_author_scheduled_date_latest_revision_id_index ON message (quote_id, quote_author, scheduled_date, latest_revision_id)")
    stopwatch.split("v186-indexes")

    val foreignKeyViolations: List<SqlUtil.ForeignKeyViolation> = SqlUtil.getForeignKeyViolations(db, "message")
    if (foreignKeyViolations.isNotEmpty()) {
      Log.w(TAG, "Foreign key violations!\n${foreignKeyViolations.joinToString(separator = "\n")}")
      throw IllegalStateException("Foreign key violations!")
    }
    stopwatch.split("fk-check")

    stopwatch.stop(TAG)
  }

  private fun getSelfId(db: SQLiteDatabase): RecipientId? {
    val idByAci: RecipientId? = getLocalAci(ApplicationDependencies.getApplication())?.let { aci ->
      db.rawQuery("SELECT _id FROM recipient WHERE uuid = ?", SqlUtil.buildArgs(aci))
        .readToSingleObject { RecipientId.from(it.requireLong("_id")) }
    }

    if (idByAci != null) {
      return idByAci
    }

    Log.w(TAG, "Failed to find by ACI! Will try by E164.")

    val idByE164: RecipientId? = getLocalE164(ApplicationDependencies.getApplication())?.let { e164 ->
      db.rawQuery("SELECT _id FROM recipient WHERE phone = ?", SqlUtil.buildArgs(e164))
        .readToSingleObject { RecipientId.from(it.requireLong("_id")) }
    }

    if (idByE164 == null) {
      Log.w(TAG, "Also failed to find by E164!")
    }

    return idByE164
  }

  private fun getLocalAci(context: Application): ACI? {
    if (KeyValueDatabase.exists(context)) {
      val keyValueDatabase = KeyValueDatabase.getInstance(context).readableDatabase
      keyValueDatabase.query("key_value", arrayOf("value"), "key = ?", SqlUtil.buildArgs("account.aci"), null, null, null).use { cursor ->
        return if (cursor.moveToFirst()) {
          ACI.parseOrNull(cursor.requireString("value"))
        } else {
          Log.w(TAG, "ACI not present in KV database!")
          null
        }
      }
    } else {
      Log.w(TAG, "Pre-KV database -- searching for ACI in shared prefs.")
      return ACI.parseOrNull(PreferenceManager.getDefaultSharedPreferences(context).getString("pref_local_uuid", null))
    }
  }

  private fun getLocalE164(context: Application): String? {
    if (KeyValueDatabase.exists(context)) {
      val keyValueDatabase = KeyValueDatabase.getInstance(context).readableDatabase
      keyValueDatabase.query("key_value", arrayOf("value"), "key = ?", SqlUtil.buildArgs("account.e164"), null, null, null).use { cursor ->
        return if (cursor.moveToFirst()) {
          cursor.requireString("value")
        } else {
          Log.w(TAG, "E164 not present in KV database!")
          null
        }
      }
    } else {
      Log.w(TAG, "Pre-KV database -- searching for E164 in shared prefs.")
      return PreferenceManager.getDefaultSharedPreferences(context).getString("pref_local_number", null)
    }
  }

  private fun getAllDependentItems(db: SQLiteDatabase, tableName: String): List<SqlItem> {
    return db.rawQuery("SELECT type, name, sql FROM sqlite_schema WHERE tbl_name='$tableName' AND type != 'table'").readToList { cursor ->
      SqlItem(
        type = cursor.requireNonNullString("type"),
        name = cursor.requireNonNullString("name"),
        createStatement = cursor.requireNonNullString("sql")
      )
    }
  }

  private fun columnExists(db: SQLiteDatabase, table: String, column: String): Boolean {
    return db.query("PRAGMA table_info($table)", null)
      .readToList { it.requireNonNullString("name") }
      .any { it == column }
  }

  data class SqlItem(
    val type: String,
    val name: String,
    val createStatement: String
  )
}
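For reference, the outgoingClause built near the top of V188 keeps only the low five bits of the message type (0x1F = 31) and compares the result against the outgoing base types listed there. A minimal standalone sketch that just prints the clause it expands to:

// Sketch only: reproduces the outgoingClause expression from V188 so the generated SQL is visible.
fun main() {
  val outgoingBaseTypes = listOf(21, 23, 22, 24, 25, 26, 2, 11)
  val outgoingClause = "(" + outgoingBaseTypes
    .map { "type & ${0x1F} = $it" }
    .joinToString(separator = " OR ") + ")"

  // Prints: (type & 31 = 21 OR type & 31 = 23 OR type & 31 = 22 OR ... OR type & 31 = 11)
  println(outgoingClause)
}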
@@ -0,0 +1,69 @@
package org.thoughtcrime.securesms.jobs

import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.jobmanager.Job
import org.thoughtcrime.securesms.jobmanager.impl.NetworkConstraint
import org.thoughtcrime.securesms.keyvalue.SignalStore
import org.thoughtcrime.securesms.recipients.Recipient
import org.thoughtcrime.securesms.util.Base64
import org.thoughtcrime.securesms.util.ProfileUtil
import org.whispersystems.signalservice.api.profiles.SignalServiceProfile
import java.io.IOException
import kotlin.time.Duration.Companion.days

/**
 * The worker job for [org.thoughtcrime.securesms.migrations.AccountConsistencyMigrationJob].
 */
class AccountConsistencyWorkerJob private constructor(parameters: Parameters) : BaseJob(parameters) {

  companion object {
    private val TAG = Log.tag(AccountConsistencyWorkerJob::class.java)

    const val KEY = "AccountConsistencyWorkerJob"
  }

  constructor() : this(
    Parameters.Builder()
      .setMaxInstancesForFactory(1)
      .addConstraint(NetworkConstraint.KEY)
      .setMaxAttempts(Parameters.UNLIMITED)
      .setLifespan(30.days.inWholeMilliseconds)
      .build()
  )

  override fun serialize(): ByteArray? = null

  override fun getFactoryKey(): String = KEY

  override fun onFailure() = Unit

  override fun onRun() {
    if (!SignalStore.account().hasAciIdentityKey()) {
      Log.i(TAG, "No identity set yet, skipping.")
      return
    }

    val profile: SignalServiceProfile = ProfileUtil.retrieveProfileSync(context, Recipient.self(), SignalServiceProfile.RequestType.PROFILE, false).profile
    val encodedPublicKey = Base64.encodeBytes(SignalStore.account().aciIdentityKey.publicKey.serialize())

    if (profile.identityKey != encodedPublicKey) {
      Log.w(TAG, "Identity key on profile differed from the one we have locally! Marking ourselves unregistered.")

      SignalStore.account().setRegistered(false)
      SignalStore.registrationValues().clearRegistrationComplete()
      SignalStore.registrationValues().clearHasUploadedProfile()
    } else {
      Log.i(TAG, "Everything matched.")
    }
  }

  override fun onShouldRetry(e: Exception): Boolean {
    return e is IOException
  }

  class Factory : Job.Factory<AccountConsistencyWorkerJob> {
    override fun create(parameters: Parameters, serializedData: ByteArray?): AccountConsistencyWorkerJob {
      return AccountConsistencyWorkerJob(parameters)
    }
  }
}
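The worker's core check is a string comparison: the Base64 encoding of the locally stored ACI identity public key against the identityKey string returned on our own profile. A minimal self-contained sketch of just that comparison, with java.util.Base64 standing in for the app's Base64 helper (an assumption made purely so the sketch runs on its own):

import java.util.Base64

// Sketch only: the mismatch test at the heart of AccountConsistencyWorkerJob, reduced to plain types.
// localPublicKey stands in for the serialized ACI identity public key; profileIdentityKey stands in
// for the identityKey field of our own profile as returned by the server.
fun identityMatchesProfile(localPublicKey: ByteArray, profileIdentityKey: String?): Boolean {
  val encodedLocalKey = Base64.getEncoder().encodeToString(localPublicKey)
  return profileIdentityKey == encodedLocalKey
}

fun main() {
  val key = byteArrayOf(0x05, 0x01, 0x02) // hypothetical key bytes, for illustration only
  println(identityMatchesProfile(key, Base64.getEncoder().encodeToString(key))) // true  -> "Everything matched."
  println(identityMatchesProfile(key, "differentKey"))                          // false -> mark unregistered
}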
@@ -31,6 +31,7 @@ import org.thoughtcrime.securesms.jobmanager.migrations.RecipientIdJobMigration;
import org.thoughtcrime.securesms.jobmanager.migrations.RetrieveProfileJobMigration;
import org.thoughtcrime.securesms.jobmanager.migrations.SendReadReceiptsJobMigration;
import org.thoughtcrime.securesms.jobmanager.migrations.SenderKeyDistributionSendJobRecipientMigration;
import org.thoughtcrime.securesms.migrations.AccountConsistencyMigrationJob;
import org.thoughtcrime.securesms.migrations.AccountRecordMigrationJob;
import org.thoughtcrime.securesms.migrations.ApplyUnknownFieldsToSelfMigrationJob;
import org.thoughtcrime.securesms.migrations.AttachmentCleanupMigrationJob;
@@ -84,6 +85,7 @@ public final class JobManagerFactories {

  public static Map<String, Job.Factory> getJobFactories(@NonNull Application application) {
    return new HashMap<String, Job.Factory>() {{
      put(AccountConsistencyWorkerJob.KEY, new AccountConsistencyWorkerJob.Factory());
      put(AttachmentCopyJob.KEY, new AttachmentCopyJob.Factory());
      put(AttachmentDownloadJob.KEY, new AttachmentDownloadJob.Factory());
      put(AttachmentUploadJob.KEY, new AttachmentUploadJob.Factory());
@@ -209,6 +211,7 @@ public final class JobManagerFactories {
      put(UpdateApkJob.KEY, new UpdateApkJob.Factory());

      // Migrations
      put(AccountConsistencyMigrationJob.KEY, new AccountConsistencyMigrationJob.Factory());
      put(AccountRecordMigrationJob.KEY, new AccountRecordMigrationJob.Factory());
      put(ApplyUnknownFieldsToSelfMigrationJob.KEY, new ApplyUnknownFieldsToSelfMigrationJob.Factory());
      put(AttachmentCleanupMigrationJob.KEY, new AttachmentCleanupMigrationJob.Factory());
@@ -0,0 +1,42 @@
package org.thoughtcrime.securesms.migrations

import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies
import org.thoughtcrime.securesms.jobmanager.Job
import org.thoughtcrime.securesms.jobs.AccountConsistencyWorkerJob
import org.thoughtcrime.securesms.keyvalue.SignalStore

/**
 * Migration to help address some account consistency issues that resulted under very specific situation post-device-transfer.
 */
internal class AccountConsistencyMigrationJob(
  parameters: Parameters = Parameters.Builder().build()
) : MigrationJob(parameters) {

  companion object {
    const val KEY = "AccountConsistencyMigrationJob"

    val TAG = Log.tag(AccountConsistencyMigrationJob::class.java)
  }

  override fun getFactoryKey(): String = KEY

  override fun isUiBlocking(): Boolean = false

  override fun performMigration() {
    if (!SignalStore.account().hasAciIdentityKey()) {
      Log.i(TAG, "No identity set yet, skipping.")
      return
    }

    ApplicationDependencies.getJobManager().add(AccountConsistencyWorkerJob())
  }

  override fun shouldRetry(e: Exception): Boolean = false

  class Factory : Job.Factory<AccountConsistencyMigrationJob> {
    override fun create(parameters: Parameters, serializedData: ByteArray?): AccountConsistencyMigrationJob {
      return AccountConsistencyMigrationJob(parameters)
    }
  }
}
@@ -127,9 +127,10 @@ public class ApplicationMigrations {
    static final int TO_FROM_RECIPIENTS = 82;
    static final int REBUILD_MESSAGE_FTS_INDEX_4 = 83;
    static final int INDEX_DATABASE_MIGRATION = 84;
    static final int ACCOUNT_CONSISTENCY_CHECK = 85;
  }

- public static final int CURRENT_VERSION = 84;
+ public static final int CURRENT_VERSION = 85;

  /**
   * This *must* be called after the {@link JobManager} has been instantiated, but *before* the call
@@ -567,6 +568,10 @@ public class ApplicationMigrations {
      jobs.put(Version.INDEX_DATABASE_MIGRATION, new DatabaseMigrationJob());
    }

    if (lastSeenVersion < Version.ACCOUNT_CONSISTENCY_CHECK) {
      jobs.put(Version.ACCOUNT_CONSISTENCY_CHECK, new AccountConsistencyMigrationJob());
    }

    return jobs;
  }
@@ -168,7 +168,9 @@ public final class LiveRecipientCache {
    }

    if (selfId == null) {
      Log.i(TAG, "Creating self for the first time.");
      selfId = recipientTable.getAndPossiblyMerge(localAci, localE164);
      recipientTable.updatePendingSelfData(selfId);
    }

    synchronized (localRecipientId) {
@@ -14,7 +14,6 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Arrays;

/**
 * Performs the networking setup/tear down for the server. This includes
@@ -110,6 +109,8 @@ final class NetworkServerThread extends Thread {
            handler.sendEmptyMessage(NETWORK_CLIENT_DISCONNECTED);
          }
        }
      } catch (RuntimeException e) {
        throw e;
      } catch (Exception e) {
        Log.w(TAG, e);
      } finally {