D14855.1765013076.diff

diff --git a/native/cpp/CommonCpp/DatabaseManagers/SQLiteQueryExecutor.cpp b/native/cpp/CommonCpp/DatabaseManagers/SQLiteQueryExecutor.cpp
--- a/native/cpp/CommonCpp/DatabaseManagers/SQLiteQueryExecutor.cpp
+++ b/native/cpp/CommonCpp/DatabaseManagers/SQLiteQueryExecutor.cpp
@@ -246,15 +246,28 @@
"FROM messages AS m "
"LEFT JOIN media "
" ON m.id = media.container "
- "WHERE m.thread = ? "
+ "WHERE m.thread = :thread "
+ "UNION "
+ "SELECT "
+ " m.id, m.local_id, m.thread, m.user, m.type, m.future_type, "
+ " m.content, m.time, backup_media.id, backup_media.container, "
+ " backup_media.thread, backup_media.uri, backup_media.type, "
+ " backup_media.extras "
+ "FROM backup_messages AS m "
+ "LEFT JOIN backup_media "
+ " ON m.id = backup_media.container "
+ "WHERE m.thread = :thread "
"ORDER BY m.time DESC, m.id DESC "
- "LIMIT ? OFFSET ?;";
+ "LIMIT :limit OFFSET :offset;";
SQLiteStatementWrapper preparedSQL(
this->getConnection(), query, "Failed to fetch messages.");
- bindStringToSQL(threadID.c_str(), preparedSQL, 1);
- bindIntToSQL(limit, preparedSQL, 2);
- bindIntToSQL(offset, preparedSQL, 3);
+ int thread_index = sqlite3_bind_parameter_index(preparedSQL, ":thread");
+ bindStringToSQL(threadID.c_str(), preparedSQL, thread_index);
+ int limit_index = sqlite3_bind_parameter_index(preparedSQL, ":limit");
+ bindIntToSQL(limit, preparedSQL, limit_index);
+ int offset_index = sqlite3_bind_parameter_index(preparedSQL, ":offset");
+ bindIntToSQL(offset, preparedSQL, offset_index);
return this->processMessagesResults(preparedSQL);
}
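
Note on the named-parameter switch in the hunk above: SQLite assigns the same parameter index to every occurrence of a given named parameter, so ":thread" only needs to be bound once even though it now appears in both halves of the UNION. The following is a minimal standalone sketch against the plain sqlite3 C API to illustrate that behavior; the "items"/"backup_items" tables and columns are illustrative placeholders, not Comm's schema or its SQLiteStatementWrapper helpers.

// Sketch only: demonstrates sqlite3_bind_parameter_index with a repeated
// named parameter. Table and column names here are hypothetical.
#include <sqlite3.h>
#include <cstdio>

void fetchByThread(sqlite3 *db, const char *threadID, int limit, int offset) {
  const char *sql =
      "SELECT id FROM items WHERE thread = :thread "
      "UNION "
      "SELECT id FROM backup_items WHERE thread = :thread "
      "ORDER BY id DESC LIMIT :limit OFFSET :offset;";

  sqlite3_stmt *stmt = nullptr;
  if (sqlite3_prepare_v2(db, sql, -1, &stmt, nullptr) != SQLITE_OK) {
    std::printf("prepare failed: %s\n", sqlite3_errmsg(db));
    return;
  }

  // Both occurrences of :thread resolve to the same index, so one bind
  // covers the main-table and backup-table branches of the UNION.
  int threadIdx = sqlite3_bind_parameter_index(stmt, ":thread");
  sqlite3_bind_text(stmt, threadIdx, threadID, -1, SQLITE_TRANSIENT);
  sqlite3_bind_int(stmt, sqlite3_bind_parameter_index(stmt, ":limit"), limit);
  sqlite3_bind_int(stmt, sqlite3_bind_parameter_index(stmt, ":offset"), offset);

  while (sqlite3_step(stmt) == SQLITE_ROW) {
    std::printf("%s\n", sqlite3_column_text(stmt, 0));
  }
  sqlite3_finalize(stmt);
}
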
diff --git a/web/shared-worker/_generated/comm_query_executor.wasm b/web/shared-worker/_generated/comm_query_executor.wasm
index 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000
GIT binary patch
literal 0
Hc$@<O00001
literal 0
Hc$@<O00001
diff --git a/web/shared-worker/queries/fetch-messages-queries.test.js b/web/shared-worker/queries/fetch-messages-queries.test.js
--- a/web/shared-worker/queries/fetch-messages-queries.test.js
+++ b/web/shared-worker/queries/fetch-messages-queries.test.js
@@ -1,6 +1,9 @@
// @flow
+import { threadSpecs } from 'lib/shared/threads/thread-specs.js';
import { messageTypes } from 'lib/types/message-types-enum.js';
+import { threadTypes } from 'lib/types/thread-types-enum.js';
+import type { ThreadType } from 'lib/types/thread-types-enum.js';
import { getDatabaseModule } from '../db-module.js';
import type {
@@ -14,8 +17,11 @@
describe('Fetch messages queries', () => {
let queryExecutor;
let dbModule;
- const threadID = '123';
const userID = '124';
+ const threadID = '123';
+ const threadType: ThreadType = threadTypes.COMMUNITY_OPEN_SUBTHREAD;
+ const thickThreadID = 'thick-thread-123';
+ const thickThreadType: ThreadType = threadTypes.PERSONAL;
beforeAll(async () => {
dbModule = getDatabaseModule();
@@ -28,6 +34,31 @@
throw new Error('SQLiteQueryExecutor is missing');
}
+ // Create the regular thread
+ queryExecutor.replaceThread(
+ {
+ id: threadID,
+ type: threadType,
+ name: null,
+ avatar: null,
+ description: null,
+ color: 'ffffff',
+ creationTime: BigInt(1000),
+ parentThreadID: null,
+ containingThreadID: null,
+ community: null,
+ members: '1',
+ roles: '1',
+ currentUser: '{}',
+ sourceMessageID: null,
+ repliesCount: 0,
+ pinnedCount: 0,
+ timestamps: null,
+ },
+ threadSpecs[threadType].protocol.dataIsBackedUp,
+ );
+
+ // Create 50 messages using the appropriate backup flag
for (let i = 0; i < 50; i++) {
const message: WebMessage = {
id: i.toString(),
@@ -39,7 +70,52 @@
content: `text-${i}`,
time: BigInt(i),
};
- queryExecutor.replaceMessage(message, false);
+ queryExecutor.replaceMessage(
+ message,
+ threadSpecs[threadType].protocol.dataIsBackedUp,
+ );
+ }
+
+ // Create a thick thread that uses backup tables
+ queryExecutor.replaceThread(
+ {
+ id: thickThreadID,
+ type: thickThreadType,
+ name: null,
+ avatar: null,
+ description: null,
+ color: 'ffffff',
+ creationTime: BigInt(2000),
+ parentThreadID: null,
+ containingThreadID: null,
+ community: null,
+ members: '1',
+ roles: '1',
+ currentUser: '{}',
+ sourceMessageID: null,
+ repliesCount: 0,
+ pinnedCount: 0,
+ timestamps: null,
+ },
+ threadSpecs[thickThreadType].protocol.dataIsBackedUp,
+ );
+
+ // Add 10 messages to the thick thread (which will go to backup tables)
+ for (let i = 0; i < 10; i++) {
+ const message: WebMessage = {
+ id: `thick-${i}`,
+ localID: null,
+ thread: thickThreadID,
+ user: userID,
+ type: messageTypes.TEXT,
+ futureType: null,
+ content: `thick-text-${i}`,
+ time: BigInt(1000 + i),
+ };
+ queryExecutor.replaceMessage(
+ message,
+ threadSpecs[thickThreadType].protocol.dataIsBackedUp,
+ );
}
});
@@ -105,4 +181,25 @@
assertMessageEqual(result[i], 49 - i);
}
});
+
+ it('should fetch messages from both normal and backup tables', () => {
+ // Fetch messages from thick thread - should get all
+ // 10 since they're in backup tables
+ const thickResult = queryExecutor.fetchMessages(thickThreadID, 20, 0);
+ expect(thickResult.length).toBe(10);
+
+ // Verify the messages are correctly fetched (ordered by time DESC)
+ for (let i = 0; i < 10; i++) {
+ expect(thickResult[i].message.id).toBe(`thick-${9 - i}`);
+ expect(thickResult[i].message.content).toBe(`thick-text-${9 - i}`);
+ expect(thickResult[i].message.thread).toBe(thickThreadID);
+ }
+
+ // Fetch messages from original thin thread - should still work
+ const thinResult = queryExecutor.fetchMessages(threadID, 5, 0);
+ expect(thinResult.length).toBe(5);
+ for (let i = 0; i < 5; i++) {
+ assertMessageEqual(thinResult[i], 49 - i);
+ }
+ });
});
