import React from 'react';
import { AppRegistry, Button, View } from 'react-native';

import { getApp } from '@react-native-firebase/app';
import { getVertexAI, getGenerativeModel, Schema } from '@react-native-firebase/vertexai';

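// Demo app exercising the Vertex AI in Firebase API: text generation, streaming,
// multi-modal input, JSON-constrained output, chat, token counting, and function calling.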
function App() {
  return (
    <View>
      <View style={{ height: 90 }} />
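      {/* Single-shot text generation from a plain text prompt */}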
      <Button
        title="Generate Content"
        onPress={async () => {
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const model = getGenerativeModel(vertexai, { model: 'gemini-1.5-flash' });

            const result = await model.generateContent('What is 2 + 2?');

            console.log('result', result.response.text());
          } catch (e) {
            console.error(e);
          }
        }}
      />
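      {/* Streamed text generation, consuming the response as an async iterable of chunks */}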
      <Button
        title="Generate Content Stream"
        onPress={async () => {
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const model = getGenerativeModel(vertexai, { model: 'gemini-1.5-flash' });

            const result = await model.generateContentStream('Write me a short, funny rap');

            let text = '';
            for await (const chunk of result.stream) {
              const chunkText = chunk.text();
              console.log(chunkText);

              text += chunkText;
            }

            console.log('result', text);
          } catch (e) {
            console.error(e);
          }
        }}
      />
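      {/* Multi-modal streaming: a text prompt combined with an inline base64-encoded PNG */}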
      <Button
        title="Generate Content Stream multi-modal"
        onPress={async () => {
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const model = getGenerativeModel(vertexai, { model: 'gemini-1.5-flash' });
            const prompt = 'What can you see?';
            const base64Emoji =
              'iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAApgAAAKYB3X3/OAAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAANCSURBVEiJtZZPbBtFFMZ/M7ubXdtdb1xSFyeilBapySVU8h8OoFaooFSqiihIVIpQBKci6KEg9Q6H9kovIHoCIVQJJCKE1ENFjnAgcaSGC6rEnxBwA04Tx43t2FnvDAfjkNibxgHxnWb2e/u992bee7tCa00YFsffekFY+nUzFtjW0LrvjRXrCDIAaPLlW0nHL0SsZtVoaF98mLrx3pdhOqLtYPHChahZcYYO7KvPFxvRl5XPp1sN3adWiD1ZAqD6XYK1b/dvE5IWryTt2udLFedwc1+9kLp+vbbpoDh+6TklxBeAi9TL0taeWpdmZzQDry0AcO+jQ12RyohqqoYoo8RDwJrU+qXkjWtfi8Xxt58BdQuwQs9qC/afLwCw8tnQbqYAPsgxE1S6F3EAIXux2oQFKm0ihMsOF71dHYx+f3NND68ghCu1YIoePPQN1pGRABkJ6Bus96CutRZMydTl+TvuiRW1m3n0eDl0vRPcEysqdXn+jsQPsrHMquGeXEaY4Yk4wxWcY5V/9scqOMOVUFthatyTy8QyqwZ+kDURKoMWxNKr2EeqVKcTNOajqKoBgOE28U4tdQl5p5bwCw7BWquaZSzAPlwjlithJtp3pTImSqQRrb2Z8PHGigD4RZuNX6JYj6wj7O4TFLbCO/Mn/m8R+h6rYSUb3ekokRY6f/YukArN979jcW+V/S8g0eT/N3VN3kTqWbQ428m9/8k0P/1aIhF36PccEl6EhOcAUCrXKZXXWS3XKd2vc/TRBG9O5ELC17MmWubD2nKhUKZa26Ba2+D3P+4/MNCFwg59oWVeYhkzgN/JDR8deKBoD7Y+ljEjGZ0sosXVTvbc6RHirr2reNy1OXd6pJsQ+gqjk8VWFYmHrwBzW/n+uMPFiRwHB2I7ih8ciHFxIkd/3Omk5tCDV1t+2nNu5sxxpDFNx+huNhVT3/zMDz8usXC3ddaHBj1GHj/As08fwTS7Kt1HBTmyN29vdwAw+/wbwLVOJ3uAD1wi/dUH7Qei66PfyuRj4Ik9is+hglfbkbfR3cnZm7chlUWLdwmprtCohX4HUtlOcQjLYCu+fzGJH2QRKvP3UNz8bWk1qMxjGTOMThZ3kvgLI5AzFfo379UAAAAASUVORK5CYII=';

            // Call generateContentStream with the text and images
            const response = await model.generateContentStream([
              prompt,
              { inlineData: { mimeType: 'image/png', data: base64Emoji } },
            ]);

            let text = '';
            for await (const chunk of response.stream) {
              text += chunk.text();
            }

            console.log('Generated text:', text);
          } catch (e) {
            console.error(e);
          }
        }}
      />
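      {/* Structured output: constrain the response to JSON that matches a schema */}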
      <Button
        title="Generate JSON Response"
        onPress={async () => {
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const jsonSchema = Schema.object({
              properties: {
                characters: Schema.array({
                  items: Schema.object({
                    properties: {
                      name: Schema.string(),
                      accessory: Schema.string(),
                      age: Schema.number(),
                      species: Schema.string(),
                    },
                    optionalProperties: ['accessory'],
                  }),
                }),
              },
            });
            const model = getGenerativeModel(vertexai, {
              model: 'gemini-1.5-flash',
              generationConfig: {
                responseMimeType: 'application/json',
                responseSchema: jsonSchema,
              },
            });

            const prompt = "For use in a children's card game, generate 10 animal-based characters.";

            const result = await model.generateContent(prompt);
            console.log(result.response.text());
          } catch (e) {
            console.error(e);
          }
        }}
      />
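      {/* Multi-turn chat with seeded history and a streamed reply */}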
      <Button
        title="Start Chat"
        onPress={async () => {
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const model = getGenerativeModel(vertexai, { model: 'gemini-1.5-flash' });

            const chat = model.startChat({
              history: [
                {
                  role: 'user',
                  parts: [{ text: 'Hello, I have 2 dogs in my house.' }],
                },
                {
                  role: 'model',
                  parts: [{ text: 'Great to meet you. What would you like to know?' }],
                },
              ],
              generationConfig: {
                maxOutputTokens: 100,
              },
            });

            const msg = 'How many paws are in my house?';
            const result = await chat.sendMessageStream(msg);

            let text = '';
            for await (const chunk of result.stream) {
              const chunkText = chunk.text();
              text += chunkText;
            }
            console.log(text);
            // getHistory resolves with the accumulated conversation, including this exchange.
            console.log('history', await chat.getHistory());
          } catch (e) {
            console.error(e);
          }
        }}
      />
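      {/* Count the tokens and billable characters in a prompt before sending it */}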
      <Button
        title="Count Tokens"
        onPress={async () => {
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const model = getGenerativeModel(vertexai, { model: 'gemini-1.5-flash' });

            const result = await model.countTokens('What is 2 + 2?');

            console.log('totalBillableCharacters', result.totalBillableCharacters);
            console.log('totalTokens', result.totalTokens);
          } catch (e) {
            console.error(e);
          }
        }}
      />

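      {/* Function calling: the model requests a tool invocation, then uses the result to answer */}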
      <Button
        title="Function Calling"
        onPress={async () => {
          // This function calls a hypothetical external API that returns
          // a collection of weather information for a given location on a given date.
          // `location` is an object of the form { city: string, state: string }
          async function fetchWeather({ location, date }) {
            // For demo purposes, this hypothetical response is hardcoded here in the expected format.
            return {
              temperature: 38,
              chancePrecipitation: '56%',
              cloudConditions: 'partlyCloudy',
            };
          }
          const fetchWeatherTool = {
            functionDeclarations: [
              {
                name: 'fetchWeather',
                description: 'Get the weather conditions for a specific city on a specific date',
                parameters: Schema.object({
                  properties: {
                    location: Schema.object({
                      description:
                        'The name of the city and its state for which to get ' +
                        'the weather. Only cities in the USA are supported.',
                      properties: {
                        city: Schema.string({
                          description: 'The city of the location.',
                        }),
                        state: Schema.string({
                          description: 'The US state of the location.',
                        }),
                      },
                    }),
                    date: Schema.string({
                      description:
                        'The date for which to get the weather. Date must be in the' +
                        ' format: YYYY-MM-DD.',
                    }),
                  },
                }),
              },
            ],
          };
          try {
            const app = getApp();
            const vertexai = getVertexAI(app);
            const model = getGenerativeModel(vertexai, {
              model: 'gemini-1.5-flash',
              tools: fetchWeatherTool,
            });

            const chat = model.startChat();
            const prompt = 'What was the weather in Boston on October 17, 2024?';

            // Send the user's question (the prompt) to the model using multi-turn chat.
            let result = await chat.sendMessage(prompt);
            const functionCalls = result.response.functionCalls();
            let functionCall;
            let functionResult;
            // When the model responds with one or more function calls, invoke the function(s).
            if (functionCalls && functionCalls.length > 0) {
              for (const call of functionCalls) {
                if (call.name === 'fetchWeather') {
                  // Forward the structured input data prepared by the model
                  // to the hypothetical external API.
                  functionResult = await fetchWeather(call.args);
                  functionCall = call;
                }
              }
            }
            // Send the function result back to the model so it can fold it into its final answer.
            if (functionCall && functionResult) {
              result = await chat.sendMessage([
                {
                  functionResponse: {
                    name: functionCall.name, // "fetchWeather"
                    response: functionResult,
                  },
                },
              ]);
            }
            console.log(result.response.text());
          } catch (e) {
            console.error(e);
          }
        }}
      />
    </View>
  );
}

AppRegistry.registerComponent('testing', () => App);