Skip to content

Commit 38b8050

Browse files
committed
Improved dataProcess.stream()
± Refactored GPTforLove processing to rely on deltas to eliminate `apis[api].accumulatesText` dependency ± Created `let replyChunk` separate from `const chunk` for better clarity ± Renamed `accumulatedChunks` to `textToShow` ± Condensed final return routine
1 parent 7793f50 commit 38b8050

File tree

4 files changed

+84
-72
lines changed


amazongpt/greasemonkey/amazongpt.user.js

Lines changed: 21 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
// @description Adds the magic of AI to Amazon shopping
44
// @author KudoAI
55
// @namespace https://kudoai.com
6-
// @version 2025.1.21.2
6+
// @version 2025.1.21.3
77
// @license MIT
88
// @icon https://amazongpt.kudoai.com/assets/images/icons/amazongpt/black-gold-teal/icon48.png?v=0fddfc7
99
// @icon64 https://amazongpt.kudoai.com/assets/images/icons/amazongpt/black-gold-teal/icon64.png?v=0fddfc7
@@ -2647,35 +2647,37 @@
26472647
if (config.streamingDisabled || !config.proxyAPIenabled) return
26482648
log.caller = `get.${caller.name}() » dataProcess.stream()`
26492649
const failFlagsAndURLs = this.initFailFlags(callerAPI),
2650-
reader = resp.response.getReader() ; let accumulatedChunks = ''
2650+
reader = resp.response.getReader() ; let textToShow = ''
26512651
reader.read().then(result => processStreamText(result, callerAPI))
26522652
.catch(err => log.error('Error processing stream', err.message))
26532653

26542654
function processStreamText({ done, value }, callerAPI) {
26552655

26562656
// Handle stream done
2657-
let chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
2657+
const chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
26582658
if (done || chunk.includes(apis[callerAPI].watermark)) return handleProcessCompletion()
26592659
if (env.browser.isChromium) { // clear/add timeout since reader.read() doesn't signal done
26602660
clearTimeout(this.timeout) ; this.timeout = setTimeout(handleProcessCompletion, 500) }
26612661

2662-
// Process/show chunk
2663-
if (callerAPI == 'MixerBox AI') { // pre-process chunks
2662+
// Process/accumulate chunk
2663+
let replyChunk = ''
2664+
if (callerAPI == 'GPTforLove') { // extract parentID + chunk.delta
2665+
const chunkLines = chunk.trim().split('\n'),
2666+
chunkObjs = chunkLines.map(line => JSON.parse(line))
2667+
if (chunkObjs[0].id) apis.GPTforLove.parentID = chunkObjs[0].id // for contextual replies
2668+
chunkObjs.forEach(obj =>
2669+
replyChunk += obj.delta // AI reply
2670+
|| JSON.stringify(obj)) // error response for fail flag check
2671+
} else if (callerAPI == 'MixerBox AI') { // extract/normalize chunk.data
26642672
const extractedChunks = Array.from(chunk.matchAll(/data:(.*)/g), match => match[1]
26652673
.replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
26662674
.filter(match => !/message_(?:start|end)|done/.test(match))
2667-
chunk = extractedChunks.join('')
2675+
replyChunk = extractedChunks.join('')
26682676
}
2669-
accumulatedChunks = apis[callerAPI].accumulatesText ? chunk : accumulatedChunks + chunk
2670-
try { // to show stream text
2671-
let textToShow = ''
2672-
if (callerAPI == 'GPTforLove') { // extract parentID + latest chunk text
2673-
const jsonLines = accumulatedChunks.split('\n'),
2674-
nowResult = JSON.parse(jsonLines[jsonLines.length -1])
2675-
if (nowResult.id) apis.GPTforLove.parentID = nowResult.id // for contextual replies
2676-
textToShow = nowResult.text // for AI response
2677-
|| JSON.stringify(nowResult) // for error response
2678-
} else textToShow = accumulatedChunks
2677+
textToShow += replyChunk
2678+
2679+
// Show accumulated reply chunks
2680+
try {
26792681
const failMatch = failFlagsAndURLs.exec(textToShow)
26802682
if (failMatch) {
26812683
log.dev('Text to show', textToShow)
@@ -2689,9 +2691,10 @@
26892691
) show.reply(textToShow)
26902692
}
26912693
} catch (err) { log.error('Error showing stream', err.message) }
2694+
2695+
// Read next chunk, process if designated sender
26922696
return reader.read().then(({ done, value }) => {
2693-
if (caller.sender == callerAPI) // am designated sender, recurse
2694-
processStreamText({ done, value }, callerAPI)
2697+
if (caller.sender == callerAPI) processStreamText({ done, value }, callerAPI)
26952698
}).catch(err => log.error('Error reading stream', err.message))
26962699
}
26972700

bravegpt/greasemonkey/bravegpt.user.js

Lines changed: 21 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@
148148
// @description:zu Yengeza izimpendulo ze-AI ku-Brave Search (inikwa amandla yi-GPT-4o!)
149149
// @author KudoAI
150150
// @namespace https://kudoai.com
151-
// @version 2025.1.21.2
151+
// @version 2025.1.21.3
152152
// @license MIT
153153
// @icon https://assets.bravegpt.com/images/icons/bravegpt/icon48.png?v=df624b0
154154
// @icon64 https://assets.bravegpt.com/images/icons/bravegpt/icon64.png?v=df624b0
@@ -3397,35 +3397,37 @@
33973397
if (config.streamingDisabled || !config.proxyAPIenabled) return
33983398
log.caller = `get.${caller.name}() » dataProcess.stream()`
33993399
const failFlagsAndURLs = this.initFailFlags(callerAPI),
3400-
reader = resp.response.getReader() ; let accumulatedChunks = ''
3400+
reader = resp.response.getReader() ; let textToShow = ''
34013401
reader.read().then(result => processStreamText(result, callerAPI))
34023402
.catch(err => log.error('Error processing stream', err.message))
34033403

34043404
function processStreamText({ done, value }, callerAPI) {
34053405

34063406
// Handle stream done
3407-
let chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
3407+
const chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
34083408
if (done || chunk.includes(apis[callerAPI].watermark)) return handleProcessCompletion()
34093409
if (env.browser.isChromium) { // clear/add timeout since reader.read() doesn't signal done
34103410
clearTimeout(this.timeout) ; this.timeout = setTimeout(handleProcessCompletion, 500) }
34113411

3412-
// Process/show chunk
3413-
if (callerAPI == 'MixerBox AI') { // pre-process chunks
3412+
// Process/accumulate chunk
3413+
let replyChunk = ''
3414+
if (callerAPI == 'GPTforLove') { // extract parentID + chunk.delta
3415+
const chunkLines = chunk.trim().split('\n'),
3416+
chunkObjs = chunkLines.map(line => JSON.parse(line))
3417+
if (chunkObjs[0].id) apis.GPTforLove.parentID = chunkObjs[0].id // for contextual replies
3418+
chunkObjs.forEach(obj =>
3419+
replyChunk += obj.delta // AI reply
3420+
|| JSON.stringify(obj)) // error response for fail flag check
3421+
} else if (callerAPI == 'MixerBox AI') { // extract/normalize chunk.data
34143422
const extractedChunks = Array.from(chunk.matchAll(/data:(.*)/g), match => match[1]
34153423
.replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
34163424
.filter(match => !/message_(?:start|end)|done/.test(match))
3417-
chunk = extractedChunks.join('')
3425+
replyChunk = extractedChunks.join('')
34183426
}
3419-
accumulatedChunks = apis[callerAPI].accumulatesText ? chunk : accumulatedChunks + chunk
3420-
try { // to show stream text
3421-
let textToShow = ''
3422-
if (callerAPI == 'GPTforLove') { // extract parentID + latest chunk text
3423-
const jsonLines = accumulatedChunks.split('\n'),
3424-
nowResult = JSON.parse(jsonLines[jsonLines.length -1])
3425-
if (nowResult.id) apis.GPTforLove.parentID = nowResult.id // for contextual replies
3426-
textToShow = nowResult.text // for AI response
3427-
|| JSON.stringify(nowResult) // for error response
3428-
} else textToShow = accumulatedChunks
3427+
textToShow += replyChunk
3428+
3429+
// Show accumulated reply chunks
3430+
try {
34293431
const failMatch = failFlagsAndURLs.exec(textToShow)
34303432
if (failMatch) {
34313433
log.dev('Text to show', textToShow)
@@ -3439,9 +3441,10 @@
34393441
) show.reply(textToShow, footerContent)
34403442
}
34413443
} catch (err) { log.error('Error showing stream', err.message) }
3444+
3445+
// Read next chunk, process if designated sender
34423446
return reader.read().then(({ done, value }) => {
3443-
if (caller.sender == callerAPI) // am designated sender, recurse
3444-
processStreamText({ done, value }, callerAPI)
3447+
if (caller.sender == callerAPI) processStreamText({ done, value }, callerAPI)
34453448
}).catch(err => log.error('Error reading stream', err.message))
34463449
}
34473450

duckduckgpt/greasemonkey/duckduckgpt.user.js

Lines changed: 21 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@
148148
// @description:zu Yengeza izimpendulo ze-AI ku-DuckDuckGo (inikwa amandla yi-GPT-4o!)
149149
// @author KudoAI
150150
// @namespace https://kudoai.com
151-
// @version 2025.1.21.3
151+
// @version 2025.1.21.4
152152
// @license MIT
153153
// @icon https://assets.ddgpt.com/images/icons/duckduckgpt/icon48.png?v=06af076
154154
// @icon64 https://assets.ddgpt.com/images/icons/duckduckgpt/icon64.png?v=06af076
@@ -3281,35 +3281,37 @@
32813281
if (config.streamingDisabled || !config.proxyAPIenabled) return
32823282
log.caller = `get.${caller.name}() » dataProcess.stream()`
32833283
const failFlagsAndURLs = this.initFailFlags(callerAPI),
3284-
reader = resp.response.getReader() ; let accumulatedChunks = ''
3284+
reader = resp.response.getReader() ; let textToShow = ''
32853285
reader.read().then(result => processStreamText(result, callerAPI))
32863286
.catch(err => log.error('Error processing stream', err.message))
32873287

32883288
function processStreamText({ done, value }, callerAPI) {
32893289

32903290
// Handle stream done
3291-
let chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
3291+
const chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
32923292
if (done || chunk.includes(apis[callerAPI].watermark)) return handleProcessCompletion()
32933293
if (env.browser.isChromium) { // clear/add timeout since reader.read() doesn't signal done
32943294
clearTimeout(this.timeout) ; this.timeout = setTimeout(handleProcessCompletion, 500) }
32953295

3296-
// Process/show chunk
3297-
if (callerAPI == 'MixerBox AI') { // pre-process chunks
3296+
// Process/accumulate chunk
3297+
let replyChunk = ''
3298+
if (callerAPI == 'GPTforLove') { // extract parentID + chunk.delta
3299+
const chunkLines = chunk.trim().split('\n'),
3300+
chunkObjs = chunkLines.map(line => JSON.parse(line))
3301+
if (chunkObjs[0].id) apis.GPTforLove.parentID = chunkObjs[0].id // for contextual replies
3302+
chunkObjs.forEach(obj =>
3303+
replyChunk += obj.delta // AI reply
3304+
|| JSON.stringify(obj)) // error response for fail flag check
3305+
} else if (callerAPI == 'MixerBox AI') { // extract/normalize chunk.data
32983306
const extractedChunks = Array.from(chunk.matchAll(/data:(.*)/g), match => match[1]
32993307
.replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
33003308
.filter(match => !/message_(?:start|end)|done/.test(match))
3301-
chunk = extractedChunks.join('')
3309+
replyChunk = extractedChunks.join('')
33023310
}
3303-
accumulatedChunks = apis[callerAPI].accumulatesText ? chunk : accumulatedChunks + chunk
3304-
try { // to show stream text
3305-
let textToShow = ''
3306-
if (callerAPI == 'GPTforLove') { // extract parentID + latest chunk text
3307-
const jsonLines = accumulatedChunks.split('\n'),
3308-
nowResult = JSON.parse(jsonLines[jsonLines.length -1])
3309-
if (nowResult.id) apis.GPTforLove.parentID = nowResult.id // for contextual replies
3310-
textToShow = nowResult.text // for AI response
3311-
|| JSON.stringify(nowResult) // for error response
3312-
} else textToShow = accumulatedChunks
3311+
textToShow += replyChunk
3312+
3313+
// Show accumulated reply chunks
3314+
try {
33133315
const failMatch = failFlagsAndURLs.exec(textToShow)
33143316
if (failMatch) {
33153317
log.dev('Text to show', textToShow)
@@ -3323,9 +3325,10 @@
33233325
) show.reply(textToShow)
33243326
}
33253327
} catch (err) { log.error('Error showing stream', err.message) }
3328+
3329+
// Read next chunk, process if designated sender
33263330
return reader.read().then(({ done, value }) => {
3327-
if (caller.sender == callerAPI) // am designated sender, recurse
3328-
processStreamText({ done, value }, callerAPI)
3331+
if (caller.sender == callerAPI) processStreamText({ done, value }, callerAPI)
33293332
}).catch(err => log.error('Error reading stream', err.message))
33303333
}
33313334

googlegpt/greasemonkey/googlegpt.user.js

Lines changed: 21 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,7 @@
149149
// @description:zu Yengeza izimpendulo ze-AI ku-Google Search (inikwa amandla yi-Google Gemma + GPT-4o!)
150150
// @author KudoAI
151151
// @namespace https://kudoai.com
152-
// @version 2025.1.21.2
152+
// @version 2025.1.21.3
153153
// @license MIT
154154
// @icon https://assets.googlegpt.io/images/icons/googlegpt/black/icon48.png?v=59409b2
155155
// @icon64 https://assets.googlegpt.io/images/icons/googlegpt/black/icon64.png?v=59409b2
@@ -3577,35 +3577,37 @@
35773577
if (config.streamingDisabled || !config.proxyAPIenabled) return
35783578
log.caller = `get.${caller.name}() » dataProcess.stream()`
35793579
const failFlagsAndURLs = this.initFailFlags(callerAPI),
3580-
reader = resp.response.getReader() ; let accumulatedChunks = ''
3580+
reader = resp.response.getReader() ; let textToShow = ''
35813581
reader.read().then(result => processStreamText(result, callerAPI))
35823582
.catch(err => log.error('Error processing stream', err.message))
35833583

35843584
function processStreamText({ done, value }, callerAPI) {
35853585

35863586
// Handle stream done
3587-
let chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
3587+
const chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
35883588
if (done || chunk.includes(apis[callerAPI].watermark)) return handleProcessCompletion()
35893589
if (env.browser.isChromium) { // clear/add timeout since reader.read() doesn't signal done
35903590
clearTimeout(this.timeout) ; this.timeout = setTimeout(handleProcessCompletion, 500) }
35913591

3592-
// Process/show chunk
3593-
if (callerAPI == 'MixerBox AI') { // pre-process chunks
3592+
// Process/accumulate chunk
3593+
let replyChunk = ''
3594+
if (callerAPI == 'GPTforLove') { // extract parentID + chunk.delta
3595+
const chunkLines = chunk.trim().split('\n'),
3596+
chunkObjs = chunkLines.map(line => JSON.parse(line))
3597+
if (chunkObjs[0].id) apis.GPTforLove.parentID = chunkObjs[0].id // for contextual replies
3598+
chunkObjs.forEach(obj =>
3599+
replyChunk += obj.delta // AI reply
3600+
|| JSON.stringify(obj)) // error response for fail flag check
3601+
} else if (callerAPI == 'MixerBox AI') { // extract/normalize chunk.data
35943602
const extractedChunks = Array.from(chunk.matchAll(/data:(.*)/g), match => match[1]
35953603
.replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
35963604
.filter(match => !/message_(?:start|end)|done/.test(match))
3597-
chunk = extractedChunks.join('')
3605+
replyChunk = extractedChunks.join('')
35983606
}
3599-
accumulatedChunks = apis[callerAPI].accumulatesText ? chunk : accumulatedChunks + chunk
3600-
try { // to show stream text
3601-
let textToShow = ''
3602-
if (callerAPI == 'GPTforLove') { // extract parentID + latest chunk text
3603-
const jsonLines = accumulatedChunks.split('\n'),
3604-
nowResult = JSON.parse(jsonLines[jsonLines.length -1])
3605-
if (nowResult.id) apis.GPTforLove.parentID = nowResult.id // for contextual replies
3606-
textToShow = nowResult.text // for AI response
3607-
|| JSON.stringify(nowResult) // for error response
3608-
} else textToShow = accumulatedChunks
3607+
textToShow += replyChunk
3608+
3609+
// Show accumulated reply chunks
3610+
try {
36093611
const failMatch = failFlagsAndURLs.exec(textToShow)
36103612
if (failMatch) {
36113613
log.dev('Text to show', textToShow)
@@ -3619,9 +3621,10 @@
36193621
) show.reply(textToShow, footerContent)
36203622
}
36213623
} catch (err) { log.error('Error showing stream', err.message) }
3624+
3625+
// Read next chunk, process if designated sender
36223626
return reader.read().then(({ done, value }) => {
3623-
if (caller.sender == callerAPI) // am designated sender, recurse
3624-
processStreamText({ done, value }, callerAPI)
3627+
if (caller.sender == callerAPI) processStreamText({ done, value }, callerAPI)
36253628
}).catch(err => log.error('Error reading stream', err.message))
36263629
}
36273630

0 commit comments

Comments (0)