diff --git a/invokeai/frontend/web/src/app/components/GlobalHookIsolator.tsx b/invokeai/frontend/web/src/app/components/GlobalHookIsolator.tsx
index 6efde2ead8c..5dfcdcab5b4 100644
--- a/invokeai/frontend/web/src/app/components/GlobalHookIsolator.tsx
+++ b/invokeai/frontend/web/src/app/components/GlobalHookIsolator.tsx
@@ -9,6 +9,7 @@ import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
 import type { PartialAppConfig } from 'app/types/invokeai';
 import { useFocusRegionWatcher } from 'common/hooks/focus';
 import { useGlobalHotkeys } from 'common/hooks/useGlobalHotkeys';
+import { useDynamicPromptsWatcher } from 'features/dynamicPrompts/hooks/useDynamicPromptsWatcher';
 import { useStarterModelsToast } from 'features/modelManagerV2/hooks/useStarterModelsToast';
 import { useWorkflowBuilderWatcher } from 'features/nodes/components/sidePanel/workflow/IsolatedWorkflowBuilderWatcher';
 import { useReadinessWatcher } from 'features/queue/store/readiness';
@@ -58,6 +59,7 @@ export const GlobalHookIsolator = memo(
     useSyncQueueStatus();
     useFocusRegionWatcher();
     useWorkflowBuilderWatcher();
+    useDynamicPromptsWatcher();
 
     return null;
   }
diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts
index bdec74d65db..2d8942f9e5a 100644
--- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts
+++ b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/index.ts
@@ -22,7 +22,6 @@ import { addImageToDeleteSelectedListener } from 'app/store/middleware/listenerM
 import { addImageUploadedFulfilledListener } from 'app/store/middleware/listenerMiddleware/listeners/imageUploaded';
 import { addModelSelectedListener } from 'app/store/middleware/listenerMiddleware/listeners/modelSelected';
 import { addModelsLoadedListener } from 'app/store/middleware/listenerMiddleware/listeners/modelsLoaded';
-import { addDynamicPromptsListener } from 'app/store/middleware/listenerMiddleware/listeners/promptChanged';
 import { addSetDefaultSettingsListener } from 'app/store/middleware/listenerMiddleware/listeners/setDefaultSettings';
 import { addSocketConnectedEventListener } from 'app/store/middleware/listenerMiddleware/listeners/socketConnected';
 import type { AppDispatch, RootState } from 'app/store/store';
@@ -95,7 +94,4 @@ addAppConfigReceivedListener(startAppListening);
 // Ad-hoc upscale workflwo
 addAdHocPostProcessingRequestedListener(startAppListening);
 
-// Prompts
-addDynamicPromptsListener(startAppListening);
-
 addSetDefaultSettingsListener(startAppListening);
diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/promptChanged.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/promptChanged.ts
deleted file mode 100644
index 0be242f49d3..00000000000
--- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/promptChanged.ts
+++ /dev/null
@@ -1,89 +0,0 @@
-import { isAnyOf } from '@reduxjs/toolkit';
-import type { AppStartListening } from 'app/store/middleware/listenerMiddleware';
-import { positivePromptChanged } from 'features/controlLayers/store/paramsSlice';
-import {
-  combinatorialToggled,
-  isErrorChanged,
-  isLoadingChanged,
-  maxPromptsChanged,
-  maxPromptsReset,
-  parsingErrorChanged,
-  promptsChanged,
-} from 'features/dynamicPrompts/store/dynamicPromptsSlice';
-import { getShouldProcessPrompt } from 'features/dynamicPrompts/util/getShouldProcessPrompt';
-import { getPresetModifiedPrompts } from 'features/nodes/util/graph/graphBuilderUtils';
-import { activeStylePresetIdChanged } from 'features/stylePresets/store/stylePresetSlice';
-import { stylePresetsApi } from 'services/api/endpoints/stylePresets';
-import { utilitiesApi } from 'services/api/endpoints/utilities';
-
-import { socketConnected } from './socketConnected';
-
-const matcher = isAnyOf(
-  positivePromptChanged,
-  combinatorialToggled,
-  maxPromptsChanged,
-  maxPromptsReset,
-  socketConnected,
-  activeStylePresetIdChanged,
-  stylePresetsApi.endpoints.listStylePresets.matchFulfilled
-);
-
-export const addDynamicPromptsListener = (startAppListening: AppStartListening) => {
-  startAppListening({
-    matcher,
-    effect: async (action, { dispatch, getState, cancelActiveListeners, delay }) => {
-      cancelActiveListeners();
-      const state = getState();
-      const { positivePrompt } = getPresetModifiedPrompts(state);
-      const { maxPrompts } = state.dynamicPrompts;
-
-      if (state.config.disabledFeatures.includes('dynamicPrompting')) {
-        return;
-      }
-
-      const cachedPrompts = utilitiesApi.endpoints.dynamicPrompts.select({
-        prompt: positivePrompt,
-        max_prompts: maxPrompts,
-      })(state).data;
-
-      if (cachedPrompts) {
-        dispatch(promptsChanged(cachedPrompts.prompts));
-        dispatch(parsingErrorChanged(cachedPrompts.error));
-        return;
-      }
-
-      if (!getShouldProcessPrompt(positivePrompt)) {
-        dispatch(promptsChanged([positivePrompt]));
-        dispatch(parsingErrorChanged(undefined));
-        dispatch(isErrorChanged(false));
-        return;
-      }
-
-      if (!state.dynamicPrompts.isLoading) {
-        dispatch(isLoadingChanged(true));
-      }
-
-      // debounce request
-      await delay(1000);
-
-      try {
-        const req = dispatch(
-          utilitiesApi.endpoints.dynamicPrompts.initiate({
-            prompt: positivePrompt,
-            max_prompts: maxPrompts,
-          })
-        );
-
-        const res = await req.unwrap();
-        req.unsubscribe();
-
-        dispatch(promptsChanged(res.prompts));
-        dispatch(parsingErrorChanged(res.error));
-        dispatch(isErrorChanged(false));
-      } catch {
-        dispatch(isErrorChanged(true));
-        dispatch(isLoadingChanged(false));
-      }
-    },
-  });
-};
diff --git a/invokeai/frontend/web/src/features/dynamicPrompts/hooks/useDynamicPromptsWatcher.tsx b/invokeai/frontend/web/src/features/dynamicPrompts/hooks/useDynamicPromptsWatcher.tsx
new file mode 100644
index 00000000000..bdfcd3e38a4
--- /dev/null
+++ b/invokeai/frontend/web/src/features/dynamicPrompts/hooks/useDynamicPromptsWatcher.tsx
@@ -0,0 +1,95 @@
+import { useAppStore } from 'app/store/nanostores/store';
+import { useAppSelector } from 'app/store/storeHooks';
+import {
+  isErrorChanged,
+  isLoadingChanged,
+  parsingErrorChanged,
+  promptsChanged,
+  selectDynamicPromptsMaxPrompts,
+} from 'features/dynamicPrompts/store/dynamicPromptsSlice';
+import { getShouldProcessPrompt } from 'features/dynamicPrompts/util/getShouldProcessPrompt';
+import { selectPresetModifiedPrompts } from 'features/nodes/util/graph/graphBuilderUtils';
+import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
+import { debounce } from 'lodash-es';
+import { useEffect, useMemo } from 'react';
+import { utilitiesApi } from 'services/api/endpoints/utilities';
+
+const DYNAMIC_PROMPTS_DEBOUNCE_MS = 1000;
+
+/**
+ * This hook watches for changes to state that should trigger dynamic prompts to be updated.
+ */
+export const useDynamicPromptsWatcher = () => {
+  const { getState, dispatch } = useAppStore();
+  // The prompt to process is derived from the preset-modified prompts
+  const presetModifiedPrompts = useAppSelector(selectPresetModifiedPrompts);
+  const maxPrompts = useAppSelector(selectDynamicPromptsMaxPrompts);
+
+  const dynamicPrompting = useFeatureStatus('dynamicPrompting');
+
+  const debouncedUpdateDynamicPrompts = useMemo(
+    () =>
+      debounce(async (positivePrompt: string, maxPrompts: number) => {
+        // Try to fetch the dynamic prompts and store in state
+        try {
+          const req = dispatch(
+            utilitiesApi.endpoints.dynamicPrompts.initiate(
+              {
+                prompt: positivePrompt,
+                max_prompts: maxPrompts,
+              },
+              { subscribe: false }
+            )
+          );
+
+          const res = await req.unwrap();
+
+          dispatch(promptsChanged(res.prompts));
+          dispatch(parsingErrorChanged(res.error));
+          dispatch(isErrorChanged(false));
+        } catch {
+          dispatch(isErrorChanged(true));
+          dispatch(isLoadingChanged(false));
+        }
+      }, DYNAMIC_PROMPTS_DEBOUNCE_MS),
+    [dispatch]
+  );
+
+  useEffect(() => {
+    if (!dynamicPrompting) {
+      return;
+    }
+
+    const { positivePrompt } = presetModifiedPrompts;
+
+    // Before we execute, imperatively check the dynamic prompts query cache to see if we have already fetched this prompt
+    const state = getState();
+
+    const cachedPrompts = utilitiesApi.endpoints.dynamicPrompts.select({
+      prompt: positivePrompt,
+      max_prompts: maxPrompts,
+    })(state).data;
+
+    if (cachedPrompts) {
+      // Yep we already did this prompt, use the cached result
+      dispatch(promptsChanged(cachedPrompts.prompts));
+      dispatch(parsingErrorChanged(cachedPrompts.error));
+      return;
+    }
+
+    // If the prompt is not in the cache, check if we should process it - this is just looking for dynamic prompts syntax
+    if (!getShouldProcessPrompt(positivePrompt)) {
+      dispatch(promptsChanged([positivePrompt]));
+      dispatch(parsingErrorChanged(undefined));
+      dispatch(isErrorChanged(false));
+      return;
+    }
+
+    // If we are here, we need to process the prompt
+    if (!state.dynamicPrompts.isLoading) {
+      dispatch(isLoadingChanged(true));
+    }
+
+    debouncedUpdateDynamicPrompts(positivePrompt, maxPrompts);
+  }, [debouncedUpdateDynamicPrompts, dispatch, dynamicPrompting, getState, maxPrompts, presetModifiedPrompts]);
+};
diff --git a/invokeai/frontend/web/src/features/dynamicPrompts/store/dynamicPromptsSlice.ts b/invokeai/frontend/web/src/features/dynamicPrompts/store/dynamicPromptsSlice.ts
index fd2a9d0ecdd..0ce2a92a703 100644
--- a/invokeai/frontend/web/src/features/dynamicPrompts/store/dynamicPromptsSlice.ts
+++ b/invokeai/frontend/web/src/features/dynamicPrompts/store/dynamicPromptsSlice.ts
@@ -36,12 +36,6 @@ export const dynamicPromptsSlice = createSlice({
     maxPromptsChanged: (state, action: PayloadAction<number>) => {
       state.maxPrompts = action.payload;
     },
-    maxPromptsReset: (state) => {
-      state.maxPrompts = initialDynamicPromptsState.maxPrompts;
-    },
-    combinatorialToggled: (state) => {
-      state.combinatorial = !state.combinatorial;
-    },
     promptsChanged: (state, action: PayloadAction<string[]>) => {
       state.prompts = action.payload;
       state.isLoading = false;
@@ -63,8 +57,6 @@ export const dynamicPromptsSlice = createSlice({
 
 export const {
   maxPromptsChanged,
-  maxPromptsReset,
-  combinatorialToggled,
   promptsChanged,
   parsingErrorChanged,
   isErrorChanged,
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts
index 1bd7548e401..c63f6fc582b 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildMultidiffusionUpscaleGraph.ts
@@ -8,7 +8,7 @@ import { isNonRefinerMainModelConfig, isSpandrelImageToImageModelConfig } from '
 import { assert } from 'tsafe';
 
 import { addLoRAs } from './generation/addLoRAs';
-import { getBoardField, getPresetModifiedPrompts } from './graphBuilderUtils';
+import { getBoardField, selectPresetModifiedPrompts } from './graphBuilderUtils';
 
 export const buildMultidiffusionUpscaleGraph = async (
   state: RootState
@@ -97,7 +97,7 @@ export const buildMultidiffusionUpscaleGraph = async (
 
   if (model.base === 'sdxl') {
     const { positivePrompt, negativePrompt, positiveStylePrompt, negativeStylePrompt } =
-      getPresetModifiedPrompts(state);
+      selectPresetModifiedPrompts(state);
 
     posCond = g.addNode({
       type: 'sdxl_compel_prompt',
@@ -130,7 +130,7 @@ export const buildMultidiffusionUpscaleGraph = async (
       negative_style_prompt: negativeStylePrompt,
     });
   } else {
-    const { positivePrompt, negativePrompt } = getPresetModifiedPrompts(state);
+    const { positivePrompt, negativePrompt } = selectPresetModifiedPrompts(state);
 
     posCond = g.addNode({
       type: 'compel',
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildCogView4Graph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildCogView4Graph.ts
index 649ae3d7097..25a073e32c9 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildCogView4Graph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildCogView4Graph.ts
@@ -16,8 +16,8 @@ import { Graph } from 'features/nodes/util/graph/generation/Graph';
 import {
   CANVAS_OUTPUT_PREFIX,
   getBoardField,
-  getPresetModifiedPrompts,
   getSizes,
+  selectPresetModifiedPrompts,
 } from 'features/nodes/util/graph/graphBuilderUtils';
 import type { ImageOutputNodes } from 'features/nodes/util/graph/types';
 import type { Invocation } from 'services/api/types';
@@ -45,7 +45,7 @@ export const buildCogView4Graph = async (
   assert(model, 'No model found in state');
 
   const { originalSize, scaledSize } = getSizes(bbox);
-  const { positivePrompt, negativePrompt } = getPresetModifiedPrompts(state);
+  const { positivePrompt, negativePrompt } = selectPresetModifiedPrompts(state);
 
   const g = new Graph(getPrefixedId('cogview4_graph'));
   const modelLoader = g.addNode({
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildFLUXGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildFLUXGraph.ts
index 7de93d21672..1af977fb1f3 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildFLUXGraph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildFLUXGraph.ts
@@ -19,8 +19,8 @@ import { Graph } from 'features/nodes/util/graph/generation/Graph';
 import {
   CANVAS_OUTPUT_PREFIX,
   getBoardField,
-  getPresetModifiedPrompts,
   getSizes,
+  selectPresetModifiedPrompts,
 } from 'features/nodes/util/graph/graphBuilderUtils';
 import type { ImageOutputNodes } from 'features/nodes/util/graph/types';
 import { t } from 'i18next';
@@ -91,7 +91,7 @@ export const buildFLUXGraph = async (
     guidance = 30;
   }
 
-  const { positivePrompt } = getPresetModifiedPrompts(state);
+  const { positivePrompt } = selectPresetModifiedPrompts(state);
 
   const g = new Graph(getPrefixedId('flux_graph'));
   const modelLoader = g.addNode({
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD1Graph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD1Graph.ts
index 25f7accf1a6..880193ea964 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD1Graph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD1Graph.ts
@@ -20,8 +20,8 @@ import { Graph } from 'features/nodes/util/graph/generation/Graph';
 import {
   CANVAS_OUTPUT_PREFIX,
   getBoardField,
-  getPresetModifiedPrompts,
   getSizes,
+  selectPresetModifiedPrompts,
 } from 'features/nodes/util/graph/graphBuilderUtils';
 import type { ImageOutputNodes } from 'features/nodes/util/graph/types';
 import { selectMainModelConfig } from 'services/api/endpoints/models';
@@ -62,7 +62,7 @@ export const buildSD1Graph = async (
   assert(model, 'No model found in state');
 
   const fp32 = vaePrecision === 'fp32';
-  const { positivePrompt, negativePrompt } = getPresetModifiedPrompts(state);
+  const { positivePrompt, negativePrompt } = selectPresetModifiedPrompts(state);
   const { originalSize, scaledSize } = getSizes(bbox);
 
   const g = new Graph(getPrefixedId('sd1_graph'));
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD3Graph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD3Graph.ts
index 04af3a5f00b..dd64aaf48e4 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD3Graph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSD3Graph.ts
@@ -15,8 +15,8 @@ import { Graph } from 'features/nodes/util/graph/generation/Graph';
 import {
   CANVAS_OUTPUT_PREFIX,
   getBoardField,
-  getPresetModifiedPrompts,
   getSizes,
+  selectPresetModifiedPrompts,
 } from 'features/nodes/util/graph/graphBuilderUtils';
 import type { ImageOutputNodes } from 'features/nodes/util/graph/types';
 import { selectMainModelConfig } from 'services/api/endpoints/models';
@@ -56,7 +56,7 @@ export const buildSD3Graph = async (
   } = params;
 
   const { originalSize, scaledSize } = getSizes(bbox);
-  const { positivePrompt, negativePrompt } = getPresetModifiedPrompts(state);
+  const { positivePrompt, negativePrompt } = selectPresetModifiedPrompts(state);
 
   const g = new Graph(getPrefixedId('sd3_graph'));
   const modelLoader = g.addNode({
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSDXLGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSDXLGraph.ts
index 4774fe80cca..55e4ec5a294 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSDXLGraph.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/generation/buildSDXLGraph.ts
@@ -20,8 +20,8 @@ import { Graph } from 'features/nodes/util/graph/generation/Graph';
 import {
   CANVAS_OUTPUT_PREFIX,
   getBoardField,
-  getPresetModifiedPrompts,
   getSizes,
+  selectPresetModifiedPrompts,
 } from 'features/nodes/util/graph/graphBuilderUtils';
 import type { ImageOutputNodes } from 'features/nodes/util/graph/types';
 import { selectMainModelConfig } from 'services/api/endpoints/models';
@@ -67,7 +67,8 @@ export const buildSDXLGraph = async (
   const fp32 = vaePrecision === 'fp32';
 
   const { originalSize, scaledSize } = getSizes(bbox);
-  const { positivePrompt, negativePrompt, positiveStylePrompt, negativeStylePrompt } = getPresetModifiedPrompts(state);
+  const { positivePrompt, negativePrompt, positiveStylePrompt, negativeStylePrompt } =
+    selectPresetModifiedPrompts(state);
 
   const g = new Graph(getPrefixedId('sdxl_graph'));
   const modelLoader = g.addNode({
diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/graphBuilderUtils.ts b/invokeai/frontend/web/src/features/nodes/util/graph/graphBuilderUtils.ts
index 7686b99123d..c288f80bb7a 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/graphBuilderUtils.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/graphBuilderUtils.ts
@@ -1,11 +1,13 @@
+import { createSelector } from '@reduxjs/toolkit';
 import type { RootState } from 'app/store/store';
-import type { ParamsState } from 'features/controlLayers/store/paramsSlice';
+import { type ParamsState, selectParamsSlice } from 'features/controlLayers/store/paramsSlice';
 import type { CanvasState } from 'features/controlLayers/store/types';
 import type { BoardField } from 'features/nodes/types/common';
 import type { Graph } from 'features/nodes/util/graph/generation/Graph';
 import { buildPresetModifiedPrompt } from 'features/stylePresets/hooks/usePresetModifiedPrompts';
+import { selectStylePresetSlice } from 'features/stylePresets/store/stylePresetSlice';
 import { pick } from 'lodash-es';
-import { stylePresetsApi } from 'services/api/endpoints/stylePresets';
+import { selectListStylePresetsRequestState } from 'services/api/endpoints/stylePresets';
 import type { Invocation } from 'services/api/types';
 import { assert } from 'tsafe';
 
@@ -25,44 +27,47 @@ export const getBoardField = (state: RootState): BoardField | undefined => {
 /**
  * Gets the prompts, modified for the active style preset.
  */
-export const getPresetModifiedPrompts = (
-  state: RootState
-): { positivePrompt: string; negativePrompt: string; positiveStylePrompt?: string; negativeStylePrompt?: string } => {
-  const { positivePrompt, negativePrompt, positivePrompt2, negativePrompt2, shouldConcatPrompts } = state.params;
-  const { activeStylePresetId } = state.stylePreset;
-
-  if (activeStylePresetId) {
-    const { data } = stylePresetsApi.endpoints.listStylePresets.select()(state);
-
-    const activeStylePreset = data?.find((item) => item.id === activeStylePresetId);
-
-    if (activeStylePreset) {
-      const presetModifiedPositivePrompt = buildPresetModifiedPrompt(
-        activeStylePreset.preset_data.positive_prompt,
-        positivePrompt
-      );
-
-      const presetModifiedNegativePrompt = buildPresetModifiedPrompt(
-        activeStylePreset.preset_data.negative_prompt,
-        negativePrompt
-      );
-
-      return {
-        positivePrompt: presetModifiedPositivePrompt,
-        negativePrompt: presetModifiedNegativePrompt,
-        positiveStylePrompt: shouldConcatPrompts ? presetModifiedPositivePrompt : positivePrompt2,
-        negativeStylePrompt: shouldConcatPrompts ? presetModifiedNegativePrompt : negativePrompt2,
-      };
+export const selectPresetModifiedPrompts = createSelector(
+  selectParamsSlice,
+  selectStylePresetSlice,
+  selectListStylePresetsRequestState,
+  (params, stylePresetSlice, listStylePresetsRequestState) => {
+    const { positivePrompt, negativePrompt, positivePrompt2, negativePrompt2, shouldConcatPrompts } = params;
+    const { activeStylePresetId } = stylePresetSlice;
+
+    if (activeStylePresetId) {
+      const { data } = listStylePresetsRequestState;
+
+      const activeStylePreset = data?.find((item) => item.id === activeStylePresetId);
+
+      if (activeStylePreset) {
+        const presetModifiedPositivePrompt = buildPresetModifiedPrompt(
+          activeStylePreset.preset_data.positive_prompt,
+          positivePrompt
+        );
+
+        const presetModifiedNegativePrompt = buildPresetModifiedPrompt(
+          activeStylePreset.preset_data.negative_prompt,
+          negativePrompt
+        );
+
+        return {
+          positivePrompt: presetModifiedPositivePrompt,
+          negativePrompt: presetModifiedNegativePrompt,
+          positiveStylePrompt: shouldConcatPrompts ? presetModifiedPositivePrompt : positivePrompt2,
+          negativeStylePrompt: shouldConcatPrompts ? presetModifiedNegativePrompt : negativePrompt2,
+        };
+      }
     }
-  }
 
-  return {
-    positivePrompt,
-    negativePrompt,
-    positiveStylePrompt: shouldConcatPrompts ? positivePrompt : positivePrompt2,
-    negativeStylePrompt: shouldConcatPrompts ? negativePrompt : negativePrompt2,
-  };
-};
+    return {
+      positivePrompt,
+      negativePrompt,
+      positiveStylePrompt: shouldConcatPrompts ? positivePrompt : positivePrompt2,
+      negativeStylePrompt: shouldConcatPrompts ? negativePrompt : negativePrompt2,
+    };
+  }
+);
 
 export const getSizes = (bboxState: CanvasState['bbox']) => {
   const originalSize = pick(bboxState.rect, 'width', 'height');
diff --git a/invokeai/frontend/web/src/features/stylePresets/store/stylePresetSlice.ts b/invokeai/frontend/web/src/features/stylePresets/store/stylePresetSlice.ts
index 72dbcd25f1a..a0e6eb4002e 100644
--- a/invokeai/frontend/web/src/features/stylePresets/store/stylePresetSlice.ts
+++ b/invokeai/frontend/web/src/features/stylePresets/store/stylePresetSlice.ts
@@ -70,7 +70,7 @@ export const stylePresetPersistConfig: PersistConfig = {
   persistDenylist: [],
 };
 
-const selectStylePresetSlice = (state: RootState) => state.stylePreset;
+export const selectStylePresetSlice = (state: RootState) => state.stylePreset;
 
 const createStylePresetSelector = (selector: Selector) => createSelector(selectStylePresetSlice, selector);
 
diff --git a/invokeai/frontend/web/src/services/api/endpoints/stylePresets.ts b/invokeai/frontend/web/src/services/api/endpoints/stylePresets.ts
index 5e3ab54b040..25d80dc47e2 100644
--- a/invokeai/frontend/web/src/services/api/endpoints/stylePresets.ts
+++ b/invokeai/frontend/web/src/services/api/endpoints/stylePresets.ts
@@ -146,3 +146,5 @@ export const getStylePreset = (
   const req = getStore().dispatch(stylePresetsApi.endpoints.getStylePreset.initiate(style_preset_id, _options));
   return req.unwrap();
 };
+
+export const selectListStylePresetsRequestState = stylePresetsApi.endpoints.listStylePresets.select();
diff --git a/invokeai/frontend/web/src/services/api/endpoints/utilities.ts b/invokeai/frontend/web/src/services/api/endpoints/utilities.ts
index 0b8839032cf..9fdedd914a1 100644
--- a/invokeai/frontend/web/src/services/api/endpoints/utilities.ts
+++ b/invokeai/frontend/web/src/services/api/endpoints/utilities.ts
@@ -21,7 +21,6 @@ export const utilitiesApi = api.injectEndpoints({
         body: arg,
         method: 'POST',
       }),
-      keepUnusedDataFor: 86400, // 24 hours
       // We need to fetch this on reconnect bc the user may have changed the text field while
       // disconnected.
       providesTags: ['FetchOnReconnect'],