
Commit 805dbe4

Condensed dataProcess.text(), improved comments
1 parent 31a45b0 commit 805dbe4

4 files changed: 55 additions & 47 deletions

amazongpt/greasemonkey/amazongpt.user.js

Lines changed: 13 additions & 11 deletions

@@ -3,7 +3,7 @@
 // @description Adds the magic of AI to Amazon shopping
 // @author KudoAI
 // @namespace https://kudoai.com
-// @version 2025.1.21.6
+// @version 2025.1.21.7
 // @license MIT
 // @icon https://amazongpt.kudoai.com/assets/images/icons/amazongpt/black-gold-teal/icon48.png?v=0fddfc7
 // @icon64 https://amazongpt.kudoai.com/assets/images/icons/amazongpt/black-gold-teal/icon64.png?v=0fddfc7
@@ -2723,7 +2723,7 @@
             : resp.status == 429 ? ['tooManyRequests', 'suggestProxy']
             : ['openAInotWorking', 'suggestProxy'] )
     else api.tryNew(caller)
-} else if (callerAPI == 'OpenAI' && resp.response) {
+} else if (callerAPI == 'OpenAI' && resp.response) { // show response from OpenAI
     const failMatch = failFlagsAndURLs.exec(resp.response)
     if (failMatch) { // suggest proxy
         log.dev('Response text', resp.response)
@@ -2735,20 +2735,22 @@
             handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     }
-} else if (resp.responseText) { // show response
+} else if (resp.responseText) { // show response from proxy API
     if (callerAPI == 'GPTforLove') {
         try {
-            const chunks = resp.responseText.trim().split('\n'),
-                lastChunk = JSON.parse(chunks[chunks.length -1])
-            if (lastChunk.id) apis.GPTforLove.parentID = lastChunk.id
-            textToShow = lastChunk.text ; handleProcessCompletion()
+            const chunkLines = resp.responseText.trim().split('\n'),
+                lastChunkObj = JSON.parse(chunkLines[chunkLines.length -1])
+            apis.GPTforLove.parentID = lastChunkObj.id || null
+            textToShow = lastChunkObj.text ; handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else if (callerAPI == 'MixerBox AI') {
         try {
-            const extractedData = [...resp.responseText.matchAll(/data:(.*)/g)].map(match => match[1]
-                .replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
-                .filter(match => !/message_(?:start|end)|done/.test(match))
-            textToShow = extractedData.join('') ; handleProcessCompletion()
+            textToShow = [...resp.responseText.matchAll(/data:(.*)/g)] // arrayify data
+                .filter(match => !/message_(?:start|end)|done/.test(match)) // exclude signals
+                .map(match => // normalize whitespace
+                    match[1].replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
+                .join('') // stringify AI reply text
+            handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else { // no processing required for all other APIs
         textToShow = resp.responseText ; handleProcessCompletion() }
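
The GPTforLove branch in the hunk above now splits the streamed response into lines, parses only the final JSON chunk, and syncs apis.GPTforLove.parentID unconditionally (falling back to null) rather than only when an id is present. Below is a minimal standalone sketch of that parsing step; parseGFLstream() and sampleStream are illustrative names, not identifiers from the scripts.

// Sketch only: mirrors the condensed GPTforLove chunk parsing shown in the diff above
function parseGFLstream(responseText) {
    const chunkLines = responseText.trim().split('\n'), // one JSON object per streamed line
          lastChunkObj = JSON.parse(chunkLines[chunkLines.length -1]) // last line = complete reply
    return {
        parentID: lastChunkObj.id || null, // conversation ID for follow-ups, null if absent
        text: lastChunkObj.text // accumulated reply text to display
    }
}

// Example w/ a fabricated two-chunk stream
const sampleStream = '{"id":"abc123","text":"Hello"}\n{"id":"abc123","text":"Hello, world!"}'
console.log(parseGFLstream(sampleStream)) // → { parentID: 'abc123', text: 'Hello, world!' }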

bravegpt/greasemonkey/bravegpt.user.js

Lines changed: 14 additions & 12 deletions

@@ -148,7 +148,7 @@
 // @description:zu Yengeza izimpendulo ze-AI ku-Brave Search (inikwa amandla yi-GPT-4o!)
 // @author KudoAI
 // @namespace https://kudoai.com
-// @version 2025.1.21.6
+// @version 2025.1.21.7
 // @license MIT
 // @icon https://assets.bravegpt.com/images/icons/bravegpt/icon48.png?v=df624b0
 // @icon64 https://assets.bravegpt.com/images/icons/bravegpt/icon64.png?v=df624b0
@@ -3473,33 +3473,35 @@
             : resp.status == 429 ? ['tooManyRequests', 'suggestProxy']
             : ['openAInotWorking', 'suggestProxy'] )
     else api.tryNew(caller)
-} else if (callerAPI == 'OpenAI' && resp.response) {
+} else if (callerAPI == 'OpenAI' && resp.response) { // show response or return RQs from OpenAI
     const failMatch = failFlagsAndURLs.exec(resp.response)
     if (failMatch) { // suggest proxy or try diff API
         log.dev('Response text', resp.response)
         log.error('Fail flag detected', `'${failMatch[0]}'`)
         if (caller == get.reply) appAlert('openAInotWorking', 'suggestProxy')
         else api.tryNew(caller)
     } else {
-        try { // to show response or return related queries
+        try { // to show response or return RQs
             textToShow = JSON.parse(resp.response).choices[0].message.content
             handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     }
-} else if (resp.responseText) { // show response or return related queries
+} else if (resp.responseText) { // show response or return RQs from proxy API
     if (callerAPI == 'GPTforLove') {
         try {
-            const chunks = resp.responseText.trim().split('\n'),
-                lastChunk = JSON.parse(chunks[chunks.length -1])
-            if (lastChunk.id) apis.GPTforLove.parentID = lastChunk.id
-            textToShow = lastChunk.text ; handleProcessCompletion()
+            const chunkLines = resp.responseText.trim().split('\n'),
+                lastChunkObj = JSON.parse(chunkLines[chunkLines.length -1])
+            apis.GPTforLove.parentID = lastChunkObj.id || null
+            textToShow = lastChunkObj.text ; handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else if (callerAPI == 'MixerBox AI') {
         try {
-            const extractedData = [...resp.responseText.matchAll(/data:(.*)/g)].map(match => match[1]
-                .replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
-                .filter(match => !/message_(?:start|end)|done/.test(match))
-            textToShow = extractedData.join('') ; handleProcessCompletion()
+            textToShow = [...resp.responseText.matchAll(/data:(.*)/g)] // arrayify data
+                .filter(match => !/message_(?:start|end)|done/.test(match)) // exclude signals
+                .map(match => // normalize whitespace
+                    match[1].replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
+                .join('') // stringify AI reply text
+            handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else { // no processing required for all other APIs
         textToShow = resp.responseText ; handleProcessCompletion() }
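
The MixerBox AI branch drops the intermediate extractedData constant and assigns a single filter → map → join chain straight to textToShow. A rough standalone sketch of that pipeline follows; decodeMixerBoxStream() and sampleSSE are made-up names for illustration only.

// Sketch only: same arrayify → exclude signals → normalize whitespace → join flow as the diff above
function decodeMixerBoxStream(responseText) {
    return [...responseText.matchAll(/data:(.*)/g)] // arrayify data lines
        .filter(match => !/message_(?:start|end)|done/.test(match)) // exclude signal frames
        .map(match => // restore whitespace placeholders
            match[1].replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
        .join('') // stringify AI reply text
}

// Example w/ a fabricated stream
const sampleSSE = 'data:message_start\ndata:Hi[SPACE]there[NEWLINE]\ndata:done'
console.log(decodeMixerBoxStream(sampleSSE)) // → 'Hi there\n'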

duckduckgpt/greasemonkey/duckduckgpt.user.js

Lines changed: 14 additions & 12 deletions

@@ -148,7 +148,7 @@
 // @description:zu Yengeza izimpendulo ze-AI ku-DuckDuckGo (inikwa amandla yi-GPT-4o!)
 // @author KudoAI
 // @namespace https://kudoai.com
-// @version 2025.1.21.8
+// @version 2025.1.21.9
 // @license MIT
 // @icon https://assets.ddgpt.com/images/icons/duckduckgpt/icon48.png?v=06af076
 // @icon64 https://assets.ddgpt.com/images/icons/duckduckgpt/icon64.png?v=06af076
@@ -3357,33 +3357,35 @@
             : resp.status == 429 ? ['tooManyRequests', 'suggestProxy']
             : ['openAInotWorking', 'suggestProxy'] )
     else api.tryNew(caller)
-} else if (callerAPI == 'OpenAI' && resp.response) {
+} else if (callerAPI == 'OpenAI' && resp.response) { // show response or return RQs from OpenAI
     const failMatch = failFlagsAndURLs.exec(resp.response)
     if (failMatch) { // suggest proxy or try diff API
         log.dev('Response text', resp.response)
         log.error('Fail flag detected', `'${failMatch[0]}'`)
         if (caller == get.reply) appAlert('openAInotWorking', 'suggestProxy')
         else api.tryNew(caller)
     } else {
-        try { // to show response or return related queries
+        try { // to show response or return RQs
             textToShow = JSON.parse(resp.response).choices[0].message.content
             handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     }
-} else if (resp.responseText) { // show response or return related queries
+} else if (resp.responseText) { // show response or return RQs from proxy API
     if (callerAPI == 'GPTforLove') {
         try {
-            const chunks = resp.responseText.trim().split('\n'),
-                lastChunk = JSON.parse(chunks[chunks.length -1])
-            if (lastChunk.id) apis.GPTforLove.parentID = lastChunk.id
-            textToShow = lastChunk.text ; handleProcessCompletion()
+            const chunkLines = resp.responseText.trim().split('\n'),
+                lastChunkObj = JSON.parse(chunkLines[chunkLines.length -1])
+            apis.GPTforLove.parentID = lastChunkObj.id || null
+            textToShow = lastChunkObj.text ; handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else if (callerAPI == 'MixerBox AI') {
         try {
-            const extractedData = [...resp.responseText.matchAll(/data:(.*)/g)].map(match => match[1]
-                .replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
-                .filter(match => !/message_(?:start|end)|done/.test(match))
-            textToShow = extractedData.join('') ; handleProcessCompletion()
+            textToShow = [...resp.responseText.matchAll(/data:(.*)/g)] // arrayify data
+                .filter(match => !/message_(?:start|end)|done/.test(match)) // exclude signals
+                .map(match => // normalize whitespace
+                    match[1].replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
+                .join('') // stringify AI reply text
+            handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else { // no processing required for all other APIs
         textToShow = resp.responseText ; handleProcessCompletion() }
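
The OpenAI branch itself is unchanged apart from its comments: it still extracts the reply from a standard chat-completions payload via JSON.parse(resp.response).choices[0].message.content. A tiny sketch with a fabricated payload (sampleOpenAIresp is not a captured response):

// Sketch only: illustrates the choices[0].message.content extraction used above
const sampleOpenAIresp = '{"choices":[{"message":{"role":"assistant","content":"42"}}]}'
const textToShow = JSON.parse(sampleOpenAIresp).choices[0].message.content
console.log(textToShow) // → '42'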

googlegpt/greasemonkey/googlegpt.user.js

Lines changed: 14 additions & 12 deletions

@@ -149,7 +149,7 @@
 // @description:zu Yengeza izimpendulo ze-AI ku-Google Search (inikwa amandla yi-Google Gemma + GPT-4o!)
 // @author KudoAI
 // @namespace https://kudoai.com
-// @version 2025.1.21.6
+// @version 2025.1.21.7
 // @license MIT
 // @icon https://assets.googlegpt.io/images/icons/googlegpt/black/icon48.png?v=59409b2
 // @icon64 https://assets.googlegpt.io/images/icons/googlegpt/black/icon64.png?v=59409b2
@@ -3653,33 +3653,35 @@
             : resp.status == 429 ? ['tooManyRequests', 'suggestProxy']
             : ['openAInotWorking', 'suggestProxy'] )
     else api.tryNew(caller)
-} else if (callerAPI == 'OpenAI' && resp.response) {
+} else if (callerAPI == 'OpenAI' && resp.response) { // show response or return RQs from OpenAI
     const failMatch = failFlagsAndURLs.exec(resp.response)
     if (failMatch) { // suggest proxy or try diff API
         log.dev('Response text', resp.response)
         log.error('Fail flag detected', `'${failMatch[0]}'`)
         if (caller == get.reply) appAlert('openAInotWorking', 'suggestProxy')
         else api.tryNew(caller)
     } else {
-        try { // to show response or return related queries
+        try { // to show response or return RQs
            textToShow = JSON.parse(resp.response).choices[0].message.content
             handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     }
-} else if (resp.responseText) { // show response or return related queries
+} else if (resp.responseText) { // show response or return RQs from proxy API
     if (callerAPI == 'GPTforLove') {
         try {
-            const chunks = resp.responseText.trim().split('\n'),
-                lastChunk = JSON.parse(chunks[chunks.length -1])
-            if (lastChunk.id) apis.GPTforLove.parentID = lastChunk.id
-            textToShow = lastChunk.text ; handleProcessCompletion()
+            const chunkLines = resp.responseText.trim().split('\n'),
+                lastChunkObj = JSON.parse(chunkLines[chunkLines.length -1])
+            apis.GPTforLove.parentID = lastChunkObj.id || null
+            textToShow = lastChunkObj.text ; handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else if (callerAPI == 'MixerBox AI') {
         try {
-            const extractedData = [...resp.responseText.matchAll(/data:(.*)/g)].map(match => match[1]
-                .replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
-                .filter(match => !/message_(?:start|end)|done/.test(match))
-            textToShow = extractedData.join('') ; handleProcessCompletion()
+            textToShow = [...resp.responseText.matchAll(/data:(.*)/g)] // arrayify data
+                .filter(match => !/message_(?:start|end)|done/.test(match)) // exclude signals
+                .map(match => // normalize whitespace
+                    match[1].replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
+                .join('') // stringify AI reply text
+            handleProcessCompletion()
         } catch (err) { handleProcessError(err) }
     } else { // no processing required for all other APIs
         textToShow = resp.responseText ; handleProcessCompletion() }
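
Before any of that parsing, the OpenAI branch in every script scans the raw response with failFlagsAndURLs.exec() and bails out (suggesting a proxy or trying another API) when a flag matches. The real pattern is defined elsewhere in the scripts and is not part of this diff, so the sketch below invents one purely for illustration; console.error stands in for the scripts' log.error.

// Sketch only: failFlagsAndURLs' actual contents are not shown in this commit
const failFlagsAndURLs = /unusual activity|captcha required/i // hypothetical flags
const resp = { response: 'Unusual activity detected from your network' } // fabricated response

const failMatch = failFlagsAndURLs.exec(resp.response)
if (failMatch) console.error('Fail flag detected', `'${failMatch[0]}'`) // → 'Unusual activity'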
